From 27612bd3d60d20058e0cf911626639bc0a2606bb Mon Sep 17 00:00:00 2001 From: gaurav2733 <77378510+gaurav2733@users.noreply.github.com> Date: Fri, 22 Mar 2024 15:31:19 +0530 Subject: [PATCH 01/18] fix(ui/user-group): add non existent entity page for user (#10004) Co-authored-by: akarsh991 --- .../java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java | 3 ++- datahub-graphql-core/src/main/resources/entity.graphql | 5 +++++ datahub-web-react/src/app/entity/user/UserProfile.tsx | 6 ++++++ datahub-web-react/src/graphql/user.graphql | 1 + 4 files changed, 14 insertions(+), 1 deletion(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 481aed26c9f25..f5d8d063a315b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1726,7 +1726,8 @@ private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) .dataFetcher( - "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))); + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); builder.type( "CorpUserInfo", typeWiring -> diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index b939b86813e73..8fbf0b3712873 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -3777,6 +3777,11 @@ type CorpUser implements Entity { """ globalTags: GlobalTags @deprecated + """ + Whether or not this entity exists on DataHub + """ + exists: Boolean + """ Settings that a user can customize through the datahub ui """ diff --git a/datahub-web-react/src/app/entity/user/UserProfile.tsx b/datahub-web-react/src/app/entity/user/UserProfile.tsx index e8284ba61afe4..ffbfbeb977527 100644 --- a/datahub-web-react/src/app/entity/user/UserProfile.tsx +++ b/datahub-web-react/src/app/entity/user/UserProfile.tsx @@ -11,6 +11,7 @@ import { decodeUrn } from '../shared/utils'; import UserInfoSideBar from './UserInfoSideBar'; import { useEntityRegistry } from '../../useEntityRegistry'; import { ErrorSection } from '../../shared/error/ErrorSection'; +import NonExistentEntityPage from '../shared/entity/NonExistentEntityPage'; export interface Props { onTabChange: (selectedTab: string) => void; @@ -114,6 +115,11 @@ export default function UserProfile() { dataHubRoles: userRoles, urn, }; + + if (data?.corpUser?.exists === false) { + return ; + } + return ( <> {error && } diff --git a/datahub-web-react/src/graphql/user.graphql b/datahub-web-react/src/graphql/user.graphql index 82b8c2da7ffe0..a8a4e90284956 100644 --- a/datahub-web-react/src/graphql/user.graphql +++ b/datahub-web-react/src/graphql/user.graphql @@ -3,6 +3,7 @@ query getUser($urn: String!, $groupsCount: Int!) 
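+    # exists lets the profile page distinguish a removed user from a loading or error state;
+    # UserProfile.tsx renders NonExistentEntityPage when corpUser.exists === false.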
{ urn username isNativeUser + exists info { active displayName From d573acde7e6e130257155310397d2e19db0e7ab0 Mon Sep 17 00:00:00 2001 From: Pinaki Bhattacharjee Date: Fri, 22 Mar 2024 20:14:43 +0530 Subject: [PATCH 02/18] fix(resolver): Allow users to add/remove related terms for children glossary terms (#9895) --- .../datahub/graphql/GmsGraphQLEngine.java | 6 +++-- .../glossary/AddRelatedTermsResolver.java | 6 ++++- .../glossary/RemoveRelatedTermsResolver.java | 7 ++++-- .../glossary/AddRelatedTermsResolverTest.java | 25 +++++++++++++------ .../RemoveRelatedTermsResolverTest.java | 13 +++++++--- 5 files changed, 40 insertions(+), 17 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index f5d8d063a315b..badbeee212ac7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1183,9 +1183,11 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) .dataFetcher( "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) + .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService, + this.entityClient)) .dataFetcher( - "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) + "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService, + this.entityClient)) .dataFetcher( "createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 31aa8b2ab9ddf..32c92fa6b84e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -12,6 +12,7 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; @@ -30,6 +31,7 @@ public class AddRelatedTermsResolver implements DataFetcher> { private final EntityService _entityService; + private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,10 +39,12 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final Urn urn = Urn.createFromString(input.getUrn()); return CompletableFuture.supplyAsync( () -> { - if (GlossaryUtils.canManageGlossaries(context)) { + final Urn parentUrn = GlossaryUtils.getParentUrn(urn, context, _entityClient); + if (GlossaryUtils.canManageChildrenEntities(context, 
parentUrn, _entityClient)) { try { final TermRelationshipType relationshipType = input.getRelationshipType(); final Urn urn = Urn.createFromString(input.getUrn()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index b1dd404e12465..09181c08c0af5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -11,6 +11,7 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; @@ -28,6 +29,7 @@ public class RemoveRelatedTermsResolver implements DataFetcher> { private final EntityService _entityService; + private final EntityClient _entityClient; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -35,13 +37,14 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final Urn urn = Urn.createFromString(input.getUrn()); return CompletableFuture.supplyAsync( () -> { - if (GlossaryUtils.canManageGlossaries(context)) { + final Urn parentUrn = GlossaryUtils.getParentUrn(urn, context, _entityClient); + if (GlossaryUtils.canManageChildrenEntities(context, parentUrn, _entityClient)) { try { final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); final List termUrnsToRemove = input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 8c5b1d7607027..509f776a01300 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -11,6 +11,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; @@ -39,6 +40,7 @@ private EntityService setUpService() { @Test public void testGetSuccessIsRelatedNonExistent() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); @@ -47,7 +49,7 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { 
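+    // AddRelatedTermsResolver now also takes an EntityClient (used to resolve the term's parent
+    // node for the canManageChildrenEntities check), so each test wires a Mockito mock client
+    // alongside the mocked EntityService.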
Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -72,6 +74,7 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { @Test public void testGetSuccessHasRelatedNonExistent() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); @@ -80,7 +83,7 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -105,11 +108,12 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { @Test public void testGetFailAddSelfAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -126,11 +130,12 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { @Test public void testGetFailAddNonTermAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -147,13 +152,14 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { @Test public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) .thenReturn(false); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -170,13 +176,14 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { @Test public void testGetFailAddToNonExistentUrn() throws 
Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(false); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -193,13 +200,14 @@ public void testGetFailAddToNonExistentUrn() throws Exception { @Test public void testGetFailAddToNonTerm() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(DATASET_URN)), eq(true))) .thenReturn(true); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_1_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -216,6 +224,7 @@ public void testGetFailAddToNonTerm() throws Exception { @Test public void testFailNoPermissions() throws Exception { EntityService mockService = setUpService(); + EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); @@ -224,7 +233,7 @@ public void testFailNoPermissions() throws Exception { Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_TERM_2_URN)), eq(true))) .thenReturn(true); - AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService); + AddRelatedTermsResolver resolver = new AddRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index e46d8b1503d9e..f9a718dab0a2c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -13,6 +13,7 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; @@ -44,8 +45,9 @@ public void testGetSuccessIsA() throws Exception { Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new 
RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -77,8 +79,9 @@ public void testGetSuccessHasA() throws Exception { Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -106,8 +109,9 @@ public void testFailAspectDoesNotExist() throws Exception { Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -137,8 +141,9 @@ public void testFailNoPermissions() throws Exception { Mockito.when(mockService.exists(eq(Urn.createFromString(TEST_ENTITY_URN)), eq(true))) .thenReturn(true); + EntityClient mockClient = Mockito.mock(EntityClient.class); - RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService); + RemoveRelatedTermsResolver resolver = new RemoveRelatedTermsResolver(mockService, mockClient); QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); From 332e417f7a7839b385c93356c15a32525d76ff7e Mon Sep 17 00:00:00 2001 From: jayasimhankv <145704974+jayasimhankv@users.noreply.github.com> Date: Fri, 22 Mar 2024 09:47:55 -0500 Subject: [PATCH 03/18] Increase role member count in listRoles query to 20 from 10 (#10020) Co-authored-by: Sam Black --- datahub-web-react/src/graphql/role.graphql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-web-react/src/graphql/role.graphql b/datahub-web-react/src/graphql/role.graphql index 05936c7cf6810..519d8c8419b8e 100644 --- a/datahub-web-react/src/graphql/role.graphql +++ b/datahub-web-react/src/graphql/role.graphql @@ -8,7 +8,7 @@ query listRoles($input: ListRolesInput!) { type name description - users: relationships(input: { types: ["IsMemberOfRole"], direction: INCOMING, start: 0, count: 10 }) { + users: relationships(input: { types: ["IsMemberOfRole"], direction: INCOMING, start: 0, count: 20 }) { start count total From e0cc3902be680afc99c16aa62ebb54519518c013 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20L=C3=BCdin?= <13187726+Masterchen09@users.noreply.github.com> Date: Fri, 22 Mar 2024 15:48:14 +0100 Subject: [PATCH 04/18] fix(frontend): exclude plugins/frontend/auth/user.props config does not exist warnings from log (#10043) --- datahub-frontend/conf/logback.xml | 1 + datahub-frontend/run/logback.xml | 1 + 2 files changed, 2 insertions(+) diff --git a/datahub-frontend/conf/logback.xml b/datahub-frontend/conf/logback.xml index 2a542083e20a2..78da231b4a71c 100644 --- a/datahub-frontend/conf/logback.xml +++ b/datahub-frontend/conf/logback.xml @@ -13,6 +13,7 @@ Unable to renew the session. 
The session store may not support this feature Preferred JWS algorithm: null not available. Using all metadata algorithms: + Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props diff --git a/datahub-frontend/run/logback.xml b/datahub-frontend/run/logback.xml index 9cabd3c923aa2..5d275c821e16f 100644 --- a/datahub-frontend/run/logback.xml +++ b/datahub-frontend/run/logback.xml @@ -13,6 +13,7 @@ Unable to renew the session. The session store may not support this feature Preferred JWS algorithm: null not available. Using all metadata algorithms: + Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props From 13f4993f90674440c5e4846976226d522e7fdbbf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Felix=20L=C3=BCdin?= <13187726+Masterchen09@users.noreply.github.com> Date: Fri, 22 Mar 2024 15:48:31 +0100 Subject: [PATCH 05/18] fix(ui): show dataset display name in browse paths v2 (#10054) --- datahub-web-react/src/graphql/browseV2.graphql | 5 +++++ datahub-web-react/src/graphql/fragments.graphql | 5 +++++ 2 files changed, 10 insertions(+) diff --git a/datahub-web-react/src/graphql/browseV2.graphql b/datahub-web-react/src/graphql/browseV2.graphql index f988b589bf362..2846dd3940416 100644 --- a/datahub-web-react/src/graphql/browseV2.graphql +++ b/datahub-web-react/src/graphql/browseV2.graphql @@ -27,6 +27,11 @@ query getBrowseResultsV2($input: BrowseV2Input!) { } instanceId } + ... on Dataset { + properties { + name + } + } } } start diff --git a/datahub-web-react/src/graphql/fragments.graphql b/datahub-web-react/src/graphql/fragments.graphql index 03283538847f7..be3b2e8620971 100644 --- a/datahub-web-react/src/graphql/fragments.graphql +++ b/datahub-web-react/src/graphql/fragments.graphql @@ -1066,6 +1066,11 @@ fragment browsePathV2Fields on BrowsePathV2 { } instanceId } + ... on Dataset { + properties { + name + } + } } } } From e48409402789736e2a116e2b1433e8dc229114ec Mon Sep 17 00:00:00 2001 From: trialiya <41265764+trialiya@users.noreply.github.com> Date: Fri, 22 Mar 2024 19:00:01 +0300 Subject: [PATCH 06/18] fix(metrics): get fieldName for GraphQL Mutation queries (#9972) --- .../java/com/datahub/graphql/GraphQLController.java | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 1c4f2824c6357..184379d44a7ad 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -24,7 +24,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import javax.annotation.Nonnull; @@ -193,12 +192,14 @@ private void submitMetrics(ExecutionResult executionResult) { // Extract top level resolver, parent is top level query. Assumes single query per call. List> resolvers = (List>) executionData.get("resolvers"); - Optional> parentResolver = - resolvers.stream() - .filter(resolver -> resolver.get("parentType").equals("Query")) - .findFirst(); String fieldName = - parentResolver.isPresent() ? 
(String) parentResolver.get().get("fieldName") : "UNKNOWN"; + resolvers.stream() + .filter( + resolver -> List.of("Query", "Mutation").contains(resolver.get("parentType"))) + .findFirst() + .map(parentResolver -> parentResolver.get("fieldName")) + .map(Object::toString) + .orElse("UNKNOWN"); MetricUtils.get() .histogram(MetricRegistry.name(this.getClass(), fieldName)) .update(totalDuration); From a4418f150f658ac81295fea8ddb1735d469afebf Mon Sep 17 00:00:00 2001 From: Hendrik Richert Date: Fri, 22 Mar 2024 17:39:47 +0100 Subject: [PATCH 07/18] feat(UI): disable access management ui when no roles are linked to entity (#9610) Co-authored-by: Hendrik Richert --- datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx | 6 +++++- datahub-web-react/src/graphql/dataset.graphql | 7 +++++++ 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index d9dc6efa1a76a..ffb7f742a40f1 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -198,7 +198,11 @@ export class DatasetEntity implements Entity { component: AccessManagement, display: { visible: (_, _1) => this.appconfig().config.featureFlags.showAccessManagement, - enabled: (_, _2) => true, + enabled: (_, dataset: GetDatasetQuery) => { + const accessAspect = dataset?.dataset?.access; + const rolesList = accessAspect?.roles; + return !!accessAspect && !!rolesList && rolesList.length > 0; + }, }, }, { diff --git a/datahub-web-react/src/graphql/dataset.graphql b/datahub-web-react/src/graphql/dataset.graphql index 42c8f0939e975..694cf53d03423 100644 --- a/datahub-web-react/src/graphql/dataset.graphql +++ b/datahub-web-react/src/graphql/dataset.graphql @@ -103,6 +103,13 @@ fragment nonSiblingDatasetFields on Dataset { assertions(start: 0, count: 1) { total } + access { + roles { + role { + urn + } + } + } operations(limit: 1) { timestampMillis lastUpdatedTimestamp From 8cb65bdb76d4bea35689c9829fb54af90293c047 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 22 Mar 2024 11:51:24 -0500 Subject: [PATCH 08/18] ci(filters): add graphql code to backend trigger (#10113) --- .github/actions/ci-optimization/action.yml | 1 + .../com/linkedin/datahub/graphql/GmsGraphQLEngine.java | 9 +++++---- .../resolvers/glossary/AddRelatedTermsResolver.java | 1 - 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml index 6bb389318c857..cad3a03dcb464 100644 --- a/.github/actions/ci-optimization/action.yml +++ b/.github/actions/ci-optimization/action.yml @@ -70,6 +70,7 @@ runs: - "metadata-jobs/**" - "metadata-service/**" - "metadata-utils/**" + - "datahub-graphql-core/**" - "smoke-test/**" - "docker/**" kafka-setup: diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index badbeee212ac7..f5222525368e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1183,11 +1183,12 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) .dataFetcher( 
"updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService, - this.entityClient)) .dataFetcher( - "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService, - this.entityClient)) + "addRelatedTerms", + new AddRelatedTermsResolver(this.entityService, this.entityClient)) + .dataFetcher( + "removeRelatedTerms", + new RemoveRelatedTermsResolver(this.entityService, this.entityClient)) .dataFetcher( "createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 32c92fa6b84e1..b6e8899a6a454 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -47,7 +47,6 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw if (GlossaryUtils.canManageChildrenEntities(context, parentUrn, _entityClient)) { try { final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); final List termUrns = input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); validateRelatedTermsInput(urn, termUrns); From 7315e6c40de0d280fa4813477dccab0c7a0a8560 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 22 Mar 2024 14:54:47 -0500 Subject: [PATCH 09/18] test(urn): add test case (#10112) --- .../graphql/types/mappers/MapperUtils.java | 27 ++++++++ .../types/mappers/MapperUtilsTest.java | 65 +++++++++++++++++++ .../metadata/entity/ValidationUtilsTest.java | 59 ++++++++++++++++- .../metadata/utils/EntityKeyUtilsTest.java | 38 +++++++++++ 4 files changed, 186 insertions(+), 3 deletions(-) create mode 100644 datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index b5733626468d6..701e2b3e0c595 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -11,6 +11,8 @@ import com.linkedin.datahub.graphql.generated.SearchSuggestion; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; +import com.linkedin.metadata.entity.validation.ValidationUtils; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; import java.net.URISyntaxException; @@ -18,6 +20,7 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -70,6 +73,7 @@ public static String convertFilterValue(String filterValue, List isEnti .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } + @Deprecated public static List 
getMatchedFieldEntry( List highlightMetadata) { return highlightMetadata.stream() @@ -91,6 +95,29 @@ public static List getMatchedFieldEntry( .collect(Collectors.toList()); } + public static List getMatchedFieldEntry( + @Nonnull EntityRegistry entityRegistry, + List highlightMetadata) { + return highlightMetadata.stream() + .map( + field -> { + MatchedField matchedField = new MatchedField(); + matchedField.setName(field.getName()); + matchedField.setValue(field.getValue()); + if (SearchUtils.isUrn(field.getValue())) { + try { + Urn urn = Urn.createFromString(field.getValue()); + ValidationUtils.validateUrn(entityRegistry, urn); + matchedField.setEntity(UrnToEntityMapper.map(urn)); + } catch (IllegalArgumentException | URISyntaxException e) { + log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); + } + } + return matchedField; + }) + .collect(Collectors.toList()); + } + public static SearchSuggestion mapSearchSuggestion( com.linkedin.metadata.search.SearchSuggestion suggestion) { return new SearchSuggestion( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java new file mode 100644 index 0000000000000..5e489d7da0f7c --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java @@ -0,0 +1,65 @@ +package com.linkedin.datahub.graphql.types.mappers; + +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + +import com.linkedin.common.urn.Urn; +import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.datahub.graphql.generated.MatchedField; +import com.linkedin.metadata.entity.validation.ValidationUtils; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.snapshot.Snapshot; +import java.net.URISyntaxException; +import java.util.List; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class MapperUtilsTest { + private EntityRegistry entityRegistry; + + @BeforeTest + public void setup() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + } + + @Test + public void testMatchedFieldValidation() throws URISyntaxException { + final Urn urn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:s3,urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29,PROD)"); + final Urn invalidUrn = + Urn.createFromString( + "urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29"); + assertThrows( + IllegalArgumentException.class, + () -> ValidationUtils.validateUrn(entityRegistry, invalidUrn)); + + List actualMatched = + MapperUtils.getMatchedFieldEntry( + entityRegistry, + List.of( + buildSearchMatchField(urn.toString()), + buildSearchMatchField(invalidUrn.toString()))); + + assertEquals(actualMatched.size(), 2, "Matched fields should be 2"); + assertEquals( + actualMatched.stream().filter(matchedField -> matchedField.getEntity() != null).count(), + 1, + "With urn should be 1"); + } + + private static 
com.linkedin.metadata.search.MatchedField buildSearchMatchField( + String highlightValue) { + com.linkedin.metadata.search.MatchedField field = + new com.linkedin.metadata.search.MatchedField(); + field.setName("testField"); + field.setValue(highlightValue); + return field; + } +} diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/ValidationUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/ValidationUtilsTest.java index f6f18ce915189..17eae455aa4c4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/ValidationUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/ValidationUtilsTest.java @@ -1,31 +1,56 @@ package com.linkedin.metadata.entity; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.linkedin.common.BrowsePath; +import com.linkedin.common.FabricType; import com.linkedin.common.Owner; import com.linkedin.common.OwnershipType; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.entity.validation.ValidationException; import com.linkedin.metadata.entity.validation.ValidationUtils; -import org.testng.Assert; +import com.linkedin.metadata.key.DatasetKey; +import com.linkedin.metadata.models.AspectSpec; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.snapshot.Snapshot; +import com.linkedin.metadata.utils.EntityKeyUtils; +import java.net.URISyntaxException; +import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; public class ValidationUtilsTest { + private EntityRegistry entityRegistry; + + @BeforeTest + public void setup() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + } + @Test public void testValidateOrThrowThrowsOnMissingUnrecognizedField() { DataMap rawMap = new DataMap(); rawMap.put("removed", true); rawMap.put("extraField", 1); Status status = new Status(rawMap); - Assert.assertThrows(ValidationException.class, () -> ValidationUtils.validateOrThrow(status)); + assertThrows(ValidationException.class, () -> ValidationUtils.validateOrThrow(status)); } @Test public void testValidateOrThrowThrowsOnMissingRequiredField() { DataMap rawMap = new DataMap(); BrowsePath status = new BrowsePath(rawMap); - Assert.assertThrows(ValidationException.class, () -> ValidationUtils.validateOrThrow(status)); + assertThrows(ValidationException.class, () -> ValidationUtils.validateOrThrow(status)); } @Test @@ -43,4 +68,32 @@ public void testValidateOrThrowDoesNotThrowOnMissingDefaultField() { Status status = new Status(rawMap); ValidationUtils.validateOrThrow(status); } + + @Test + public void testConvertEntityUrnToKeyUrlEncoded() throws URISyntaxException { + final Urn urn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:s3,urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29,PROD)"); + + ValidationUtils.validateUrn(entityRegistry, urn); + + final AspectSpec keyAspectSpec = + entityRegistry.getEntitySpec(urn.getEntityType()).getKeyAspectSpec(); 
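+    // The dataset name in this URN is itself a URL-encoded URN; converting the outer URN into a
+    // typed DatasetKey must keep that encoded value verbatim rather than decoding it.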
+ final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec); + + final DatasetKey expectedKey = new DatasetKey(); + expectedKey.setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); + expectedKey.setName( + "urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29"); + expectedKey.setOrigin(FabricType.PROD); + + assertEquals(actualKey, expectedKey); + + final Urn invalidUrn = + Urn.createFromString( + "urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29"); + assertThrows( + IllegalArgumentException.class, + () -> ValidationUtils.validateUrn(entityRegistry, invalidUrn)); + } } diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java index 7a6479a313244..38434838c0180 100644 --- a/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java +++ b/metadata-utils/src/test/java/com/linkedin/metadata/utils/EntityKeyUtilsTest.java @@ -4,16 +4,36 @@ import com.datahub.test.KeyPartEnum; import com.datahub.test.TestEntityKey; +import com.linkedin.common.FabricType; import com.linkedin.common.urn.Urn; +import com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.metadata.key.DatasetKey; +import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.snapshot.Snapshot; +import java.net.URISyntaxException; import org.testng.Assert; +import org.testng.annotations.BeforeTest; import org.testng.annotations.Test; /** Tests the capabilities of {@link EntityKeyUtils} */ public class EntityKeyUtilsTest { + private EntityRegistry entityRegistry; + + @BeforeTest + public void setup() { + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() + .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); + entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + } + @Test public void testConvertEntityKeyToUrn() throws Exception { final TestEntityKey key = new TestEntityKey(); @@ -59,4 +79,22 @@ public void testConvertEntityUrnToKey() throws Exception { EntityKeyUtils.convertUrnToEntityKey(urn, entitySpec.getKeyAspectSpec()); Assert.assertEquals(actualKey.data(), expectedKey.data()); } + + @Test + public void testConvertEntityUrnToKeyUrlEncoded() throws URISyntaxException { + final Urn urn = + Urn.createFromString( + "urn:li:dataset:(urn:li:dataPlatform:s3,urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29,PROD)"); + final AspectSpec keyAspectSpec = + entityRegistry.getEntitySpec(urn.getEntityType()).getKeyAspectSpec(); + final RecordTemplate actualKey = EntityKeyUtils.convertUrnToEntityKey(urn, keyAspectSpec); + + final DatasetKey expectedKey = new DatasetKey(); + expectedKey.setPlatform(Urn.createFromString("urn:li:dataPlatform:s3")); + expectedKey.setName( + "urn:li:dataset:%28urn:li:dataPlatform:s3%2Ctest-datalake-concepts/prog_maintenance%2CPROD%29"); + expectedKey.setOrigin(FabricType.PROD); + + assertEquals(actualKey, expectedKey); + } } From 36e12f7524c882c324f67ede68a02829ff7ea5b6 Mon Sep 17 00:00:00 2001 From: Chris Collins Date: 
Fri, 22 Mar 2024 15:01:21 -0700 Subject: [PATCH 10/18] fix(ui) Add min width to the usage stats component (#10056) --- .../components/SchemaFieldDrawer/FieldUsageStats.tsx | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx index 2f7288904b2df..355c3c25fefea 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Dataset/Schema/components/SchemaFieldDrawer/FieldUsageStats.tsx @@ -49,7 +49,12 @@ export default function FieldUsageStats({ expandedField }: Props) { Usage - + {relevantUsageStats.count || 0} queries / month From 35cf4f89e527b924afbe6462ce7fe9b5df898b65 Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 22 Mar 2024 21:06:12 -0500 Subject: [PATCH 11/18] log(system-update): Update DataHubStartupStep.java (#9971) --- .../upgrade/system/elasticsearch/steps/DataHubStartupStep.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index b4a506c3f5c63..d2b5965a3109c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -33,7 +33,7 @@ public Function executable() { DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); - log.info("Initiating startup for version: {}", _version); + log.info("System Update finished for version: {}", _version); } catch (Exception e) { log.error("DataHubStartupStep failed.", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); From 93b5907f99eb87650c7569b296d1e6f976c3d4bc Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Fri, 22 Mar 2024 21:47:00 -0500 Subject: [PATCH 12/18] fix(usage-stats): usage-stats error handling and filter (#10105) --- .../resolvers/dataset/DatasetUsageStatsResolver.java | 7 +++++-- .../graphql/types/usage/UsageQueryResultMapper.java | 5 +++++ .../java/com/linkedin/metadata/search/utils/ESUtils.java | 3 ++- .../com/linkedin/metadata/resources/usage/UsageStats.java | 6 +++--- 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index 371911913cab2..03d08024eb73b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -8,6 +8,7 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.UsageQueryResult; import 
com.linkedin.datahub.graphql.types.usage.UsageQueryResultMapper; +import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.usage.UsageClient; import com.linkedin.usage.UsageTimeRange; import graphql.schema.DataFetcher; @@ -45,9 +46,11 @@ public CompletableFuture get(DataFetchingEnvironment environme usageClient.getUsageStats(resourceUrn.toString(), range); return UsageQueryResultMapper.map(usageQueryResult); } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + log.error(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + MetricUtils.counter(this.getClass(), "usage_stats_dropped").inc(); } + + return UsageQueryResultMapper.EMPTY; }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index 444605cd99377..b2758adc9b8b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,13 +1,18 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.UsageQueryResult; +import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; public class UsageQueryResultMapper implements ModelMapper { + public static final UsageQueryResult EMPTY = + new UsageQueryResult(List.of(), new UsageQueryResultAggregations(0, List.of(), List.of(), 0)); + public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); public static UsageQueryResult map( diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 6c4507216482f..3263773ca064e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -188,7 +188,7 @@ public static BoolQueryBuilder buildConjunctiveFilterQuery( .forEach( criterion -> { if (Set.of(Condition.EXISTS, Condition.IS_NULL).contains(criterion.getCondition()) - || !criterion.getValue().trim().isEmpty() + || (criterion.hasValue() && !criterion.getValue().trim().isEmpty()) || criterion.hasValues()) { if (!criterion.isNegated()) { // `filter` instead of `must` (enables caching and bypasses scoring) @@ -646,6 +646,7 @@ private static RangeQueryBuilder buildRangeQueryFromCriterion( *

For all new code, we should be using the new 'values' field for performing multi-match. This * is simply retained for backwards compatibility of the search API. */ + @Deprecated private static QueryBuilder buildEqualsFromCriterionWithValue( @Nonnull final String fieldName, @Nonnull final Criterion criterion, diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java index 554b6e909e9e3..f80dfa5ce0f23 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java @@ -386,14 +386,14 @@ public Task query( Filter filter = new Filter(); ArrayList criteria = new ArrayList<>(); Criterion hasUrnCriterion = - new Criterion().setField("urn").setCondition(Condition.EQUAL).setValue(resource); + new Criterion().setField("urn").setCondition(Condition.EQUAL).setValues(new StringArray(resource)); criteria.add(hasUrnCriterion); if (startTime != null) { Criterion startTimeCriterion = new Criterion() .setField(ES_FIELD_TIMESTAMP) .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(startTime.toString()); + .setValues(new StringArray(startTime.toString())); criteria.add(startTimeCriterion); } if (endTime != null) { @@ -401,7 +401,7 @@ public Task query( new Criterion() .setField(ES_FIELD_TIMESTAMP) .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(endTime.toString()); + .setValues(new StringArray(endTime.toString())); criteria.add(endTimeCriterion); } From a0d952d4a200cbebc577e0770c68cfcdf643acda Mon Sep 17 00:00:00 2001 From: Davi Arnaut Date: Sat, 23 Mar 2024 04:11:39 -0700 Subject: [PATCH 13/18] fix(elasticsearch logging): log how long bulk execution took (#10116) --- .../search/elasticsearch/update/BulkListener.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java index b49218f4224a9..274829df53ba8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/update/BulkListener.java @@ -39,12 +39,19 @@ public void beforeBulk(long executionId, BulkRequest request) { @Override public void afterBulk(long executionId, BulkRequest request, BulkResponse response) { + String ingestTook = ""; + long ingestTookInMillis = response.getIngestTookInMillis(); + if (ingestTookInMillis != BulkResponse.NO_INGEST_TOOK) { + ingestTook = " Bulk ingest preprocessing took time ms: " + ingestTookInMillis; + } + if (response.hasFailures()) { log.error( "Failed to feed bulk request. Number of events: " + response.getItems().length + " Took time ms: " - + response.getIngestTookInMillis() + + response.getTook().getMillis() + + ingestTook + " Message: " + response.buildFailureMessage()); } else { @@ -52,7 +59,8 @@ public void afterBulk(long executionId, BulkRequest request, BulkResponse respon "Successfully fed bulk request. 
Number of events: " + response.getItems().length + " Took time ms: " - + response.getIngestTookInMillis()); + + response.getTook().getMillis() + + ingestTook); } incrementMetrics(response); } From f9e64d03cc6c16f5a9dd4b64f8ee5dfddcfcc0ef Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Sat, 23 Mar 2024 06:15:36 -0500 Subject: [PATCH 14/18] feat(auth): view authorization (#10066) --- .../io/datahubproject/OpenApiEntities.java | 32 + datahub-frontend/app/auth/AuthModule.java | 2 +- datahub-graphql-core/build.gradle | 2 +- .../datahub/graphql/GmsGraphQLEngine.java | 19 +- .../datahub/graphql/GmsGraphQLEngineArgs.java | 4 +- .../analytics/service/AnalyticsService.java | 3 +- .../analytics/service/AnalyticsUtil.java | 2 +- .../authorization/AuthorizationUtils.java | 282 +++-- .../datahub/graphql/resolvers/AuthUtils.java | 32 - .../datahub/graphql/resolvers/MeResolver.java | 37 +- .../assertion/AssertionRunEventResolver.java | 4 +- .../assertion/DeleteAssertionResolver.java | 5 +- .../assertion/EntityAssertionsResolver.java | 2 +- .../resolvers/chart/BrowseV2Resolver.java | 10 +- .../container/ContainerEntitiesResolver.java | 1 + .../container/ParentContainersResolver.java | 9 +- .../DashboardStatsSummaryResolver.java | 13 +- .../DashboardUsageStatsResolver.java | 15 +- .../dashboard/DashboardUsageStatsUtils.java | 7 +- .../CreateDataProductResolver.java | 2 +- .../ListDataProductAssetsResolver.java | 3 +- .../UpdateDataProductResolver.java | 2 +- .../dataset/DatasetStatsSummaryResolver.java | 2 +- .../dataset/DatasetUsageStatsResolver.java | 4 +- .../UpdateDeprecationResolver.java | 4 +- .../domain/DomainEntitiesResolver.java | 1 + .../domain/ParentDomainsResolver.java | 17 +- .../entity/EntityPrivilegesResolver.java | 27 +- .../glossary/ParentNodesResolver.java | 22 +- .../incident/EntityIncidentsResolver.java | 2 +- .../incident/RaiseIncidentResolver.java | 2 +- .../UpdateIncidentStatusResolver.java | 4 +- .../resolvers/ingest/IngestionAuthUtils.java | 27 +- .../ingest/IngestionResolverUtils.java | 11 +- .../GetIngestionExecutionRequestResolver.java | 2 +- ...estionSourceExecutionRequestsResolver.java | 1 + .../ingest/secret/CreateSecretResolver.java | 2 +- .../secret/GetSecretValuesResolver.java | 2 +- .../ingest/secret/UpdateSecretResolver.java | 2 +- .../resolvers/jobs/DataJobRunsResolver.java | 2 +- .../resolvers/jobs/EntityRunsResolver.java | 2 +- .../lineage/UpdateLineageResolver.java | 17 +- .../load/EntityLineageResultResolver.java | 24 +- .../EntityRelationshipsResultResolver.java | 25 +- .../load/TimeSeriesAspectResolver.java | 24 +- .../resolvers/mutate/util/DeleteUtils.java | 25 +- .../resolvers/mutate/util/DomainUtils.java | 2 +- .../resolvers/mutate/util/GlossaryUtils.java | 6 +- .../operation/ReportOperationResolver.java | 2 +- .../UpdateOwnershipTypeResolver.java | 9 +- .../policy/ListPoliciesResolver.java | 8 +- .../resolvers/policy/PolicyAuthUtils.java | 16 +- .../policy/UpsertPolicyResolver.java | 2 +- .../mappers/PolicyInfoPolicyMapper.java | 26 +- .../mappers/PolicyUpdateInputInfoMapper.java | 10 +- .../resolvers/post/ListPostsResolver.java | 4 +- .../resolvers/query/CreateQueryResolver.java | 3 +- .../resolvers/query/UpdateQueryResolver.java | 3 +- .../ListRecommendationsResolver.java | 9 +- .../resolvers/role/ListRolesResolver.java | 8 +- .../AggregateAcrossEntitiesResolver.java | 9 +- .../search/GetQuickFiltersResolver.java | 22 +- .../search/ScrollAcrossEntitiesResolver.java | 3 +- 
.../search/ScrollAcrossLineageResolver.java | 1 + .../search/SearchAcrossEntitiesResolver.java | 3 +- .../search/SearchAcrossLineageResolver.java | 4 +- .../resolvers/search/SearchResolver.java | 3 +- .../graphql/resolvers/search/SearchUtils.java | 4 +- .../UpsertStructuredPropertiesResolver.java | 2 +- .../resolvers/tag/SetTagColorResolver.java | 4 +- .../graphql/resolvers/test/TestUtils.java | 5 +- .../resolvers/user/ListUsersResolver.java | 8 +- .../resolvers/view/UpdateViewResolver.java | 9 +- .../graphql/resolvers/view/ViewUtils.java | 4 +- .../graphql/types/aspect/AspectMapper.java | 16 +- .../graphql/types/aspect/AspectType.java | 17 +- .../types/assertion/AssertionMapper.java | 16 +- .../types/assertion/AssertionType.java | 4 +- .../types/auth/AccessTokenMetadataType.java | 4 +- .../mappers/AccessTokenMetadataMapper.java | 10 +- .../graphql/types/chart/ChartType.java | 21 +- .../types/chart/mappers/ChartMapper.java | 105 +- .../chart/mappers/ChartUpdateInputMapper.java | 18 +- .../chart/mappers/InputFieldsMapper.java | 14 +- .../common/mappers/AuditStampMapper.java | 10 +- .../common/mappers/BrowsePathsV2Mapper.java | 18 +- .../mappers/ChangeAuditStampsMapper.java | 16 +- .../types/common/mappers/CostMapper.java | 11 +- .../types/common/mappers/CostValueMapper.java | 10 +- .../DataPlatformInstanceAspectMapper.java | 9 +- .../common/mappers/DeprecationMapper.java | 10 +- .../types/common/mappers/EmbedMapper.java | 10 +- .../mappers/GroupingCriterionInputMapper.java | 9 +- .../mappers/InstitutionalMemoryMapper.java | 14 +- .../InstitutionalMemoryMetadataMapper.java | 8 +- ...stitutionalMemoryMetadataUpdateMapper.java | 9 +- .../InstitutionalMemoryUpdateMapper.java | 12 +- .../types/common/mappers/OperationMapper.java | 10 +- .../types/common/mappers/OwnerMapper.java | 15 +- .../common/mappers/OwnerUpdateMapper.java | 8 +- .../types/common/mappers/OwnershipMapper.java | 16 +- .../common/mappers/OwnershipSourceMapper.java | 9 +- .../common/mappers/OwnershipUpdateMapper.java | 18 +- .../mappers/SearchFlagsInputMapper.java | 11 +- .../types/common/mappers/SiblingsMapper.java | 17 +- .../types/common/mappers/StatusMapper.java | 10 +- .../types/common/mappers/StringMapMapper.java | 10 +- .../types/common/mappers/SubTypesMapper.java | 9 +- .../common/mappers/UrnToEntityMapper.java | 9 +- .../common/mappers/util/MappingHelper.java | 13 + .../types/container/ContainerType.java | 8 +- .../container/mappers/ContainerMapper.java | 29 +- .../types/corpgroup/CorpGroupType.java | 10 +- .../CorpGroupEditablePropertiesMapper.java | 6 +- .../mappers/CorpGroupInfoMapper.java | 8 +- .../corpgroup/mappers/CorpGroupMapper.java | 39 +- .../mappers/CorpGroupPropertiesMapper.java | 8 +- .../graphql/types/corpuser/CorpUserType.java | 10 +- .../mappers/CorpUserEditableInfoMapper.java | 6 +- .../corpuser/mappers/CorpUserInfoMapper.java | 11 +- .../corpuser/mappers/CorpUserMapper.java | 36 +- .../mappers/CorpUserPropertiesMapper.java | 8 +- .../mappers/CorpUserStatusMapper.java | 8 +- .../types/dashboard/DashboardType.java | 19 +- .../dashboard/mappers/DashboardMapper.java | 108 +- .../mappers/DashboardUpdateInputMapper.java | 18 +- .../mappers/DashboardUsageMetricMapper.java | 10 +- .../graphql/types/dataflow/DataFlowType.java | 19 +- .../dataflow/mappers/DataFlowMapper.java | 53 +- .../mappers/DataFlowUpdateInputMapper.java | 18 +- .../graphql/types/datajob/DataJobType.java | 20 +- .../types/datajob/mappers/DataJobMapper.java | 48 +- .../mappers/DataJobUpdateInputMapper.java | 18 +- 
.../types/dataplatform/DataPlatformType.java | 2 +- .../mappers/DataPlatformInfoMapper.java | 9 +- .../mappers/DataPlatformMapper.java | 12 +- .../mappers/DataPlatformPropertiesMapper.java | 6 +- .../DataPlatformInstanceType.java | 4 +- .../mappers/DataPlatformInstanceMapper.java | 27 +- .../mappers/DataProcessInstanceMapper.java | 16 +- .../DataProcessInstanceRunEventMapper.java | 10 +- .../DataProcessInstanceRunResultMapper.java | 8 +- .../types/dataproduct/DataProductType.java | 6 +- .../mappers/DataProductMapper.java | 32 +- .../graphql/types/dataset/DatasetType.java | 41 +- .../types/dataset/VersionedDatasetType.java | 2 +- .../mappers/AssertionRunEventMapper.java | 25 +- .../mappers/DatasetDeprecationMapper.java | 9 +- .../dataset/mappers/DatasetFilterMapper.java | 10 +- .../types/dataset/mappers/DatasetMapper.java | 77 +- .../dataset/mappers/DatasetProfileMapper.java | 8 +- .../mappers/DatasetUpdateInputMapper.java | 26 +- .../EditableSchemaFieldInfoMapper.java | 19 +- .../mappers/EditableSchemaMetadataMapper.java | 14 +- .../mappers/ForeignKeyConstraintMapper.java | 17 +- .../dataset/mappers/PlatformSchemaMapper.java | 10 +- .../dataset/mappers/SchemaFieldMapper.java | 19 +- .../types/dataset/mappers/SchemaMapper.java | 18 +- .../dataset/mappers/SchemaMetadataMapper.java | 27 +- .../mappers/VersionedDatasetMapper.java | 66 +- .../types/datatype/DataTypeEntityMapper.java | 10 +- .../graphql/types/datatype/DataTypeType.java | 2 +- .../types/domain/DomainAssociationMapper.java | 18 +- .../graphql/types/domain/DomainMapper.java | 21 +- .../graphql/types/domain/DomainType.java | 6 +- .../entitytype/EntityTypeEntityMapper.java | 10 +- .../types/entitytype/EntityTypeType.java | 2 +- .../CreateERModelRelationshipResolver.java | 3 +- .../ERModelRelationshipType.java | 10 +- .../UpdateERModelRelationshipResolver.java | 2 +- .../mappers/ERModelRelationMapper.java | 35 +- .../ERModelRelationshipUpdateInputMapper.java | 8 +- .../graphql/types/form/FormMapper.java | 11 +- .../datahub/graphql/types/form/FormType.java | 4 +- .../types/glossary/GlossaryNodeType.java | 4 +- .../types/glossary/GlossaryTermType.java | 12 +- .../glossary/mappers/GlossaryNodeMapper.java | 23 +- .../glossary/mappers/GlossaryTermMapper.java | 37 +- .../glossary/mappers/GlossaryTermsMapper.java | 13 +- .../types/incident/IncidentMapper.java | 20 +- .../graphql/types/incident/IncidentType.java | 2 +- .../mappers/AutoCompleteResultsMapper.java | 12 +- .../types/mappers/BrowsePathMapper.java | 8 +- .../types/mappers/BrowsePathsMapper.java | 12 +- .../types/mappers/BrowseResultMapper.java | 10 +- .../types/mappers/InputModelMapper.java | 5 +- .../graphql/types/mappers/MapperUtils.java | 43 +- .../graphql/types/mappers/ModelMapper.java | 6 +- .../UrnScrollAcrossLineageResultsMapper.java | 30 +- .../types/mappers/UrnScrollResultsMapper.java | 14 +- .../UrnSearchAcrossLineageResultsMapper.java | 30 +- .../types/mappers/UrnSearchResultsMapper.java | 14 +- .../types/mlmodel/MLFeatureTableType.java | 10 +- .../graphql/types/mlmodel/MLFeatureType.java | 6 +- .../types/mlmodel/MLModelGroupType.java | 10 +- .../graphql/types/mlmodel/MLModelType.java | 10 +- .../types/mlmodel/MLPrimaryKeyType.java | 7 +- .../types/mlmodel/mappers/BaseDataMapper.java | 10 +- .../CaveatsAndRecommendationsMapper.java | 8 +- .../mlmodel/mappers/CaveatsDetailsMapper.java | 10 +- .../mappers/EthicalConsiderationsMapper.java | 6 +- .../mappers/HyperParameterMapMapper.java | 12 +- .../HyperParameterValueTypeMapper.java | 6 +- 
.../mlmodel/mappers/IntendedUseMapper.java | 12 +- .../mlmodel/mappers/MLFeatureMapper.java | 71 +- .../mappers/MLFeaturePropertiesMapper.java | 8 +- .../mlmodel/mappers/MLFeatureTableMapper.java | 66 +- .../MLFeatureTablePropertiesMapper.java | 12 +- .../mlmodel/mappers/MLHyperParamMapper.java | 10 +- .../types/mlmodel/mappers/MLMetricMapper.java | 10 +- .../mappers/MLModelFactorPromptsMapper.java | 10 +- .../mlmodel/mappers/MLModelFactorsMapper.java | 6 +- .../mlmodel/mappers/MLModelGroupMapper.java | 69 +- .../mappers/MLModelGroupPropertiesMapper.java | 8 +- .../types/mlmodel/mappers/MLModelMapper.java | 91 +- .../mappers/MLModelPropertiesMapper.java | 20 +- .../mlmodel/mappers/MLPrimaryKeyMapper.java | 72 +- .../mappers/MLPrimaryKeyPropertiesMapper.java | 8 +- .../types/mlmodel/mappers/MetricsMapper.java | 10 +- .../mappers/QuantitativeAnalysesMapper.java | 11 +- .../mlmodel/mappers/ResultsTypeMapper.java | 10 +- .../mlmodel/mappers/SourceCodeUrlMapper.java | 10 +- .../mlmodel/mappers/VersionTagMapper.java | 9 +- .../graphql/types/notebook/NotebookType.java | 18 +- .../notebook/mappers/NotebookMapper.java | 104 +- .../mappers/NotebookUpdateInputMapper.java | 16 +- .../types/ownership/OwnershipType.java | 2 +- .../types/ownership/OwnershipTypeMapper.java | 11 +- .../types/policy/DataHubPolicyMapper.java | 31 +- .../types/policy/DataHubPolicyType.java | 2 +- .../graphql/types/post/PostMapper.java | 10 +- .../graphql/types/query/QueryMapper.java | 17 +- .../graphql/types/query/QueryType.java | 4 +- .../DataFlowDataJobsRelationshipsMapper.java | 8 +- .../DownstreamEntityRelationshipsMapper.java | 8 +- .../EntityRelationshipLegacyMapper.java | 10 +- .../UpstreamEntityRelationshipsMapper.java | 8 +- .../types/restricted/RestrictedMapper.java | 2 +- .../types/restricted/RestrictedType.java | 2 +- .../graphql/types/role/DataHubRoleType.java | 2 +- .../types/role/mappers/DataHubRoleMapper.java | 10 +- .../graphql/types/rolemetadata/RoleType.java | 8 +- .../rolemetadata/mappers/RoleMapper.java | 9 +- .../types/schemafield/SchemaFieldMapper.java | 18 +- .../types/schemafield/SchemaFieldType.java | 2 +- .../StructuredPropertiesMapper.java | 23 +- .../StructuredPropertyMapper.java | 10 +- .../StructuredPropertyType.java | 2 +- .../datahub/graphql/types/tag/TagType.java | 18 +- .../types/tag/mappers/GlobalTagsMapper.java | 26 +- .../mappers/TagAssociationUpdateMapper.java | 11 +- .../graphql/types/tag/mappers/TagMapper.java | 21 +- .../tag/mappers/TagUpdateInputMapper.java | 12 +- .../datahub/graphql/types/test/TestType.java | 2 +- .../types/usage/FieldUsageCountsMapper.java | 9 +- .../types/usage/UsageAggregationMapper.java | 9 +- .../usage/UsageAggregationMetricsMapper.java | 10 +- .../UsageQueryResultAggregationMapper.java | 10 +- .../types/usage/UsageQueryResultMapper.java | 13 +- .../types/usage/UserUsageCountsMapper.java | 6 +- .../graphql/types/view/DataHubViewMapper.java | 9 +- .../graphql/types/view/DataHubViewType.java | 2 +- .../linkedin/datahub/graphql/TestUtils.java | 6 +- .../ParentContainersResolverTest.java | 6 + .../domain/ParentDomainsResolverTest.java | 6 + .../glossary/ParentNodesResolverTest.java | 9 + .../ingest/IngestionAuthUtilsTest.java | 19 +- .../secret/CreateSecretResolverTest.java | 2 +- .../secret/GetSecretValuesResolverTest.java | 2 +- .../secret/UpdateSecretResolverTest.java | 2 +- .../mutate/MutableTypeBatchResolverTest.java | 2 + .../AutoCompleteForMultipleResolverTest.java | 3 + .../search/GetQuickFiltersResolverTest.java | 2 +- 
.../types/assertion/AssertionTypeTest.java | 7 + .../types/common/mappers/EmbedMapperTest.java | 3 +- .../types/container/ContainerTypeTest.java | 19 +- .../DataPlatformInstanceTest.java | 4 + .../dataset/mappers/DatasetMapperTest.java | 6 +- .../mappers/DatasetProfileMapperTest.java | 4 +- .../types/incident/IncidentMapperTest.java | 2 +- .../types/incident/IncidentTypeTest.java | 7 + .../types/mappers/MapperUtilsTest.java | 10 +- .../types/notebook/NotebookTypeTest.java | 7 + .../graphql/types/query/QueryTypeTest.java | 11 + .../types/view/DataHubViewTypeTest.java | 11 + datahub-upgrade/build.gradle | 2 +- .../config/BackfillOwnershipTypesConfig.java | 28 - .../system/ownershiptypes/OwnershipTypes.java | 41 - .../ownershiptypes/OwnershipTypesStep.java | 276 ----- .../shared/tabs/Entity/DataJobFlowTab.tsx | 2 +- docker/profiles/docker-compose.actions.yml | 5 +- docker/profiles/docker-compose.frontend.yml | 5 +- docker/profiles/docker-compose.gms.yml | 21 +- .../events/metadata-change-log-event.md | 4 +- docs/advanced/mcp-mcl.md | 2 +- docs/api/openapi/openapi-usage-guide.md | 32 +- docs/authorization/policies.md | 44 +- .../metadata/aspect/AspectRetriever.java | 21 + .../metadata/aspect/batch/AspectsBatch.java | 2 +- .../metadata/aspect/batch/MCPItem.java | 7 +- .../metadata/aspect/hooks/OwnerTypeMap.java | 111 -- .../metadata/aspect/plugins/PluginSpec.java | 5 +- .../plugins/config/AspectPluginConfig.java | 2 + .../CreateIfNotExistsValidator.java | 75 ++ .../StructuredPropertiesValidator.java | 4 +- .../aspect/hooks/OwnerTypeMapTest.java | 220 ---- .../CreateIfNotExistsValidatorTest.java | 161 +++ metadata-auth/auth-api/build.gradle | 8 +- .../com/datahub/authentication/Actor.java | 18 + .../com/datahub/authorization/AuthUtil.java | 653 +++++++++- .../ConjunctivePrivilegeGroup.java | 20 + .../DisjunctivePrivilegeGroup.java | 32 +- ...va => ViewAuthorizationConfiguration.java} | 6 +- .../datahub/authorization/AuthUtilTest.java | 256 ++++ .../resources/MetadataChangeProposal.avsc | 9 +- .../resources/MetadataChangeProposal.avsc | 9 +- metadata-io/build.gradle | 3 +- .../aspect/utils/DefaultAspectsUtil.java | 57 +- .../metadata/client/JavaEntityClient.java | 2 +- .../metadata/entity/EntityServiceImpl.java | 174 ++- .../entity/ebean/batch/AspectsBatchImpl.java | 2 + .../entity/ebean/batch/ChangeItemImpl.java | 39 +- .../entity/validation/ValidationUtils.java | 262 ++++ .../metadata/graph/SiblingGraphService.java | 2 +- .../candidatesource/MostPopularSource.java | 6 +- .../metadata/search/LineageSearchService.java | 43 +- .../metadata/search/SearchService.java | 12 +- .../indexbuilder/MappingsBuilder.java | 7 + .../elasticsearch/query/ESSearchDAO.java | 13 +- .../request/AutocompleteRequestHandler.java | 15 +- .../query/request/SearchQueryBuilder.java | 3 +- .../query/request/SearchRequestHandler.java | 10 +- .../SearchDocumentTransformer.java | 34 +- .../search/utils/ESAccessControlUtil.java | 263 +--- .../metadata/search/utils/ESUtils.java | 2 - .../service/UpdateIndicesService.java | 48 +- .../aspect/utils/DefaultAspectsUtilTest.java | 9 +- .../metadata/entity/EntityServiceTest.java | 50 + .../sibling/SiblingGraphServiceTest.java | 7 + .../RecommendationsServiceTest.java | 10 +- ...ySearchAggregationCandidateSourceTest.java | 48 +- .../RecommendationUtilsTest.java | 6 +- .../search/LineageServiceTestBase.java | 3 +- .../search/SearchServiceTestBase.java | 3 +- .../search/fixtures/GoldenTestBase.java | 2 +- .../fixtures/LineageDataFixtureTestBase.java | 2 +- 
.../fixtures/SampleDataFixtureTestBase.java | 2 +- .../indexbuilder/MappingsBuilderTest.java | 3 +- .../search/query/SearchDAOTestBase.java | 4 +- .../request/SearchRequestHandlerTest.java | 8 +- .../search/utils/ESAccessControlUtilTest.java | 1091 ++++++++--------- .../io/datahubproject/test/DataGenerator.java | 6 +- .../test/search/SearchTestUtils.java | 3 +- ...eConsumerApplicationTestConfiguration.java | 6 + metadata-jobs/mae-consumer/build.gradle | 2 +- .../elasticsearch/ElasticsearchConnector.java | 3 +- .../kafka/hook/form/FormAssignmentHook.java | 3 +- .../hook/incident/IncidentsSummaryHook.java | 3 +- .../ingestion/IngestionSchedulerHook.java | 1 + .../spring/MCLSpringTestConfiguration.java | 4 +- metadata-jobs/mce-consumer-job/build.gradle | 1 + metadata-jobs/mce-consumer/build.gradle | 2 +- .../linkedin/events/metadata/ChangeType.pdl | 8 +- .../src/main/resources/entity-registry.yml | 18 +- .../metadata/context/EmptyContext.java | 12 + .../metadata/context/OperationContext.java | 86 +- .../context/OperationContextConfig.java | 6 +- .../metadata/context/RequestContext.java | 88 ++ .../context/ServicesRegistryContext.java | 19 + .../context/ViewAuthorizationContext.java | 38 + .../metadata/services}/RestrictedService.java | 14 +- .../metadata/services}/SecretService.java | 2 +- .../context/TestOperationContexts.java | 59 +- .../context/OperationContextTest.java | 9 +- .../AuthorizationConfiguration.java | 4 +- .../invite/InviteTokenService.java | 2 +- .../user/NativeUserService.java | 2 +- .../authorization/DataHubAuthorizer.java | 105 +- .../datahub/authorization/PolicyEngine.java | 49 +- .../datahub/telemetry/TrackingService.java | 2 +- .../invite/InviteTokenServiceTest.java | 2 +- .../user/NativeUserServiceTest.java | 2 +- .../authorization/DataHubAuthorizerTest.java | 4 +- .../telemetry/TrackingServiceTest.java | 2 +- .../authentication/AuthServiceController.java | 2 +- .../AuthServiceTestConfiguration.java | 2 +- .../src/main/resources/application.yml | 6 +- metadata-service/factories/build.gradle | 1 + .../auth/InviteTokenServiceFactory.java | 2 +- .../auth/NativeUserServiceFactory.java | 2 +- .../SystemOperationContextFactory.java | 10 +- .../services}/RestrictedServiceFactory.java | 10 +- .../services}/SecretServiceFactory.java | 4 +- .../entity/RollbackServiceFactory.java | 7 +- .../factory/graphql/GraphQLEngineFactory.java | 8 +- .../telemetry/TrackingServiceFactory.java | 2 +- .../gms/factory/usage/UsageClientFactory.java | 6 +- .../boot/steps/IngestPoliciesStep.java | 13 +- .../boot/steps/RemoveClientIdAspectStep.java | 3 +- .../secret/SecretServiceFactoryTest.java | 3 +- .../datahub/graphql/GraphQLController.java | 9 +- .../datahub/graphql/SpringQueryContext.java | 26 +- .../delegates/DatahubUsageEventsImpl.java | 25 +- .../v2/delegates/EntityApiDelegateImpl.java | 235 +++- .../JavaSpring/apiController.mustache | 5 +- .../delegates/EntityApiDelegateImplTest.java | 30 +- .../openapi/entities/EntitiesController.java | 100 +- .../controller/LineageApiImpl.java | 4 - .../elastic/OperationsController.java | 119 +- .../entities/PlatformEntitiesController.java | 44 +- .../RelationshipsController.java | 29 +- .../openapi/timeline/TimelineController.java | 4 +- .../openapi/util/MappingUtil.java | 72 +- .../v2/controller/EntityController.java | 198 ++- .../v2/controller/RelationshipController.java | 108 +- .../v2/controller/TimeseriesController.java | 35 +- .../openapi/v2/utils/ControllerUtil.java | 67 - .../java/entities/EntitiesControllerTest.java | 2 +- 
.../src/test/resources/application.properties | 4 +- .../com.linkedin.entity.runs.restspec.json | 2 +- .../com.linkedin.entity.aspects.snapshot.json | 13 +- ...com.linkedin.entity.entities.snapshot.json | 4 +- .../com.linkedin.entity.runs.snapshot.json | 6 +- ...nkedin.operations.operations.snapshot.json | 4 +- ...m.linkedin.platform.platform.snapshot.json | 4 +- .../restli-client-api/build.gradle | 14 + .../linkedin/common/client/BaseClient.java | 0 .../linkedin/common/client/ClientCache.java | 0 .../linkedin/entity/client/EntityClient.java | 0 .../entity/client/EntityClientCache.java | 0 .../entity/client/SystemEntityClient.java | 0 .../java/com/linkedin/usage/UsageClient.java | 13 + .../com/linkedin/usage/UsageClientCache.java | 0 metadata-service/restli-client/build.gradle | 1 + ...sageClient.java => RestliUsageClient.java} | 4 +- .../resources/analytics/Analytics.java | 27 +- .../resources/entity/AspectResource.java | 92 +- .../entity/BatchIngestionRunResource.java | 18 +- .../resources/entity/EntityResource.java | 641 +++++----- .../resources/entity/EntityV2Resource.java | 45 +- .../entity/EntityVersionedV2Resource.java | 33 +- .../resources/entity/ResourceUtils.java | 13 - .../resources/lineage/Relationships.java | 51 +- .../operations/OperationsResource.java | 43 +- .../metadata/resources/operations/Utils.java | 17 +- .../resources/platform/PlatformResource.java | 19 +- .../resources/restli/RestliUtils.java | 43 - .../metadata/resources/usage/UsageStats.java | 43 +- metadata-service/services/build.gradle | 6 +- .../metadata/entity/EntityService.java | 8 + .../restoreindices/RestoreIndicesResult.java | 1 + .../metadata/service/RollbackService.java | 26 +- .../metadata/shared/ValidationUtils.java | 245 ---- .../service/RestrictedServiceTest.java | 9 +- .../war/src/main/resources/boot/policies.json | 6 - .../metadata/authorization/ApiGroup.java | 10 + .../metadata/authorization/ApiOperation.java | 14 + .../metadata/authorization/Conjunctive.java | 20 + .../metadata/authorization/Disjunctive.java | 58 + .../authorization/PoliciesConfig.java | 371 +++++- .../authorization/DisjunctiveTest.java | 39 + settings.gradle | 1 + smoke-test/build.gradle | 2 + .../cli/user_groups_cmd/test_group_cmd.py | 6 +- 464 files changed, 7468 insertions(+), 4786 deletions(-) delete mode 100644 datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java delete mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java delete mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java delete mode 100644 datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java delete mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java create mode 100644 entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/CreateIfNotExistsValidator.java delete mode 100644 entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java create mode 100644 entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/CreateIfNotExistsValidatorTest.java rename metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/{SearchAuthorizationConfiguration.java => ViewAuthorizationConfiguration.java} (72%) create mode 100644 metadata-auth/auth-api/src/test/java/com/datahub/authorization/AuthUtilTest.java create mode 100644 
metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EmptyContext.java create mode 100644 metadata-operation-context/src/main/java/io/datahubproject/metadata/context/RequestContext.java create mode 100644 metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ServicesRegistryContext.java create mode 100644 metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ViewAuthorizationContext.java rename {metadata-service/services/src/main/java/com/linkedin/metadata/service => metadata-operation-context/src/main/java/io/datahubproject/metadata/services}/RestrictedService.java (56%) rename {metadata-service/services/src/main/java/com/linkedin/metadata/secret => metadata-operation-context/src/main/java/io/datahubproject/metadata/services}/SecretService.java (98%) rename metadata-service/factories/src/main/java/com/linkedin/gms/factory/{auth => context/services}/RestrictedServiceFactory.java (75%) rename metadata-service/factories/src/main/java/com/linkedin/gms/factory/{secret => context/services}/SecretServiceFactory.java (83%) delete mode 100644 metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java create mode 100644 metadata-service/restli-client-api/build.gradle rename metadata-service/{restli-client => restli-client-api}/src/main/java/com/linkedin/common/client/BaseClient.java (100%) rename metadata-service/{restli-client => restli-client-api}/src/main/java/com/linkedin/common/client/ClientCache.java (100%) rename metadata-service/{restli-client => restli-client-api}/src/main/java/com/linkedin/entity/client/EntityClient.java (100%) rename metadata-service/{restli-client => restli-client-api}/src/main/java/com/linkedin/entity/client/EntityClientCache.java (100%) rename metadata-service/{restli-client => restli-client-api}/src/main/java/com/linkedin/entity/client/SystemEntityClient.java (100%) create mode 100644 metadata-service/restli-client-api/src/main/java/com/linkedin/usage/UsageClient.java rename metadata-service/{restli-client => restli-client-api}/src/main/java/com/linkedin/usage/UsageClientCache.java (100%) rename metadata-service/restli-client/src/main/java/com/linkedin/usage/{UsageClient.java => RestliUsageClient.java} (96%) delete mode 100644 metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java delete mode 100644 metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiGroup.java create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiOperation.java create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/authorization/Conjunctive.java create mode 100644 metadata-utils/src/main/java/com/linkedin/metadata/authorization/Disjunctive.java create mode 100644 metadata-utils/src/test/java/com/linkedin/metadata/authorization/DisjunctiveTest.java diff --git a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java index 04cbadcdc6b7b..e97acb0b43c81 100644 --- a/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java +++ b/buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java @@ -460,6 +460,22 @@ private ObjectNode buildListEntityPath(Entity entity, Set parameterDefin ObjectNode postMethod = NODE_FACTORY.objectNode() .put("summary", "Create " + upperFirst) .put("operationId", 
String.format("create", upperFirst)); + ArrayNode postParameters = NODE_FACTORY.arrayNode(); + postMethod.set("parameters", postParameters); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createIfNotExists") + .put("description", "Create the aspect if it does not already exist.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createEntityIfNotExists") + .put("description", "Create the entity ONLY if it does not already exist. Fails in case when the entity exists.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); postMethod.set("requestBody", NODE_FACTORY.objectNode() .put("description", "Create " + entity.getName() + " entities.") .put("required", true) @@ -610,6 +626,22 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) { ObjectNode postMethod = NODE_FACTORY.objectNode() .put("summary", String.format("Create aspect %s on %s ", aspect, upperFirstEntity)) .put("operationId", String.format("create%s", upperFirstAspect)); + ArrayNode postParameters = NODE_FACTORY.arrayNode(); + postMethod.set("parameters", postParameters); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createIfNotExists") + .put("description", "Create the aspect if it does not already exist.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); + postParameters.add(NODE_FACTORY.objectNode() + .put("in", "query") + .put("name", "createEntityIfNotExists") + .put("description", "Create the entity if it does not already exist. Fails in case when the entity exists.") + .set("schema", NODE_FACTORY.objectNode() + .put("type", "boolean") + .put("default", false))); postMethod.set("requestBody", NODE_FACTORY.objectNode() .put("description", String.format("Create aspect %s on %s entity.", aspect, upperFirstEntity)) .put("required", true).set("content", NODE_FACTORY.objectNode() diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index c929d80328a31..5466c5f127635 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -172,7 +172,7 @@ protected OperationContext provideOperationContext(final Authentication systemAu .authentication(systemAuthentication) .build(); OperationContextConfig systemConfig = OperationContextConfig.builder() - .searchAuthorizationConfiguration(configurationProvider.getAuthorization().getSearch()) + .viewAuthorizationConfiguration(configurationProvider.getAuthorization().getView()) .allowSystemAuthentication(true) .build(); diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index fe70f2622490d..de264ce31b719 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -5,7 +5,7 @@ plugins { dependencies { - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-service:auth-impl') implementation project(':metadata-service:auth-config') implementation project(':metadata-service:configuration') diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index f5222525368e1..ba7a159c635e4 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -349,14 +349,12 @@ import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; import com.linkedin.metadata.service.ERModelRelationshipService; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; -import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; @@ -368,6 +366,8 @@ import graphql.schema.DataFetchingEnvironment; import graphql.schema.StaticDataFetcher; import graphql.schema.idl.RuntimeWiring; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; @@ -1023,13 +1023,14 @@ private DataFetcher getEntitiesResolver() { return new BatchGetEntitiesResolver( entityTypes, (env) -> { + final QueryContext context = env.getContext(); List urns = env.getArgument(URNS_FIELD_NAME); return urns.stream() + .map(UrnUtils::getUrn) .map( (urn) -> { try { - Urn entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + return UrnToEntityMapper.map(context, urn); } catch (Exception e) { throw new RuntimeException("Failed to get entity", e); } @@ -1043,8 +1044,9 @@ private DataFetcher getEntityResolver() { entityTypes, (env) -> { try { + final QueryContext context = env.getContext(); Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); + return UrnToEntityMapper.map(context, urn); } catch (Exception e) { throw new RuntimeException("Failed to get entity", e); } @@ -2187,7 +2189,12 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { "dataFlow", new LoadableTypeResolver<>( dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataFlow() != null + ? 
dataJob.getDataFlow().getUrn() + : null; + })) .dataFetcher( "dataPlatformInstance", new LoadableTypeResolver<>( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index db63dfc19b398..5f5e1c929f6ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -23,20 +23,20 @@ import com.linkedin.metadata.graph.SiblingGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; import com.linkedin.metadata.service.ERModelRelationshipService; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; -import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.version.GitVersion; import com.linkedin.usage.UsageClient; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import lombok.Data; @Data diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index baea3ea4e6201..72643ccac6325 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -220,7 +220,8 @@ private List extractBarSegmentsFromAggregations( .collect(Collectors.toList()); } - public Row buildRow(String groupByValue, Function groupByValueToCell, int count) { + public static Row buildRow( + String groupByValue, Function groupByValueToCell, int count) { List values = ImmutableList.of(groupByValue, String.valueOf(count)); List cells = ImmutableList.of( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java index be7f4d2f0897a..20f0bd7631faa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java @@ -49,7 +49,7 @@ public static Cell buildCellWithEntityLandingPage(String urn) { Cell result = new Cell(); result.setValue(urn); try { - Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn)); + Entity entity = UrnToEntityMapper.map(null, Urn.createFromString(urn)); result.setEntity(entity); result.setLinkParams( LinkParams.builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index f2f06f9c2c47f..777d0982644cc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.authorization; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.datahub.authorization.AuthUtil.VIEW_RESTRICTED_ENTITY_TYPES; +import static com.datahub.authorization.AuthUtil.canViewEntity; import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.authorization.PoliciesConfig.VIEW_ENTITY_PRIVILEGES; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.ConjunctivePrivilegeGroup; @@ -10,42 +12,54 @@ import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; -import java.time.Clock; +import io.datahubproject.metadata.context.OperationContext; +import java.lang.reflect.Field; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.util.List; -import java.util.Optional; +import java.util.Set; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import org.apache.commons.lang3.reflect.ConstructorUtils; +import org.apache.commons.lang3.reflect.FieldUtils; +import org.apache.commons.lang3.reflect.MethodUtils; +import org.codehaus.plexus.util.StringUtils; +@Slf4j public class AuthorizationUtils { - private static final Clock CLOCK = Clock.systemUTC(); + private static final String GRAPHQL_GENERATED_PACKAGE = "com.linkedin.datahub.graphql.generated"; - public static AuditStamp createAuditStamp(@Nonnull QueryContext context) { - return new AuditStamp() - .setTime(CLOCK.millis()) - .setActor(UrnUtils.getUrn(context.getActorUrn())); - } + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) { - return isAuthorized( - context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(CORP_USER_ENTITY_NAME, CORP_GROUP_ENTITY_NAME)); } public static boolean canManagePolicies(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) { - return isAuthorized( - context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } public static boolean 
canManageTokens(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(ACCESS_TOKEN_ENTITY_NAME)); } /** @@ -61,12 +75,13 @@ public static boolean canCreateDomains(@Nonnull QueryContext context) { new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageDomains(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } /** @@ -82,24 +97,25 @@ public static boolean canCreateTags(@Nonnull QueryContext context) { new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageTags(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TAGS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TAGS_PRIVILEGE); } public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { - return isAuthorized( - context, - Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), - PoliciesConfig.DELETE_ENTITY_PRIVILEGE); + return AuthUtil.isAuthorizedEntityUrns( + context.getAuthorizer(), context.getActorUrn(), DELETE, List.of(entityUrn)); } public static boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized( - context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } public static boolean canEditGroupMembers( @@ -111,7 +127,7 @@ public static boolean canEditGroupMembers( new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( + return isAuthorized( context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, @@ -130,28 +146,27 @@ public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context ImmutableList.of( PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups, null); } public static boolean canManageGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), context.getActorUrn(), 
orPrivilegeGroups); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE); } public static boolean canManageGlobalViews(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_VIEWS); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOBAL_VIEWS); } public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); } public static boolean canEditProperties(@Nonnull Urn targetUrn, @Nonnull QueryContext context) { @@ -209,37 +224,149 @@ public static boolean canDeleteQuery( return canEditEntityQueries(subjectUrns, context); } - public static boolean canViewEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(VIEW_ENTITY_PRIVILEGES))); - - final Authorizer authorizer = context.getAuthorizer(); - final String actor = context.getActorUrn(); - final String entityType = entityUrn.getEntityType(); - final Optional resourceSpec = - Optional.of(new EntitySpec(entityType, entityUrn.toString())); - - return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, orGroup); + /** + * Can view relationship logic goes here. Should be considered directionless for now. Or direction + * added to the interface. + * + * @param opContext + * @param a + * @param b + * @return + */ + public static boolean canViewRelationship( + @Nonnull OperationContext opContext, @Nonnull Urn a, @Nonnull Urn b) { + // TODO relationships filter + return true; } - public static boolean isAuthorized( - @Nonnull QueryContext context, - @Nonnull Optional resourceSpec, - @Nonnull PoliciesConfig.Privilege privilege) { - final Authorizer authorizer = context.getAuthorizer(); - final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized( - authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + /* + * Optionally check view permissions against a list of urns if the config option is enabled + */ + public static boolean canView(@Nonnull OperationContext opContext, @Nonnull Urn urn) { + // if search authorization is disabled, skip the view permission check + if (opContext.getOperationContextConfig().getViewAuthorizationConfiguration().isEnabled() + && !opContext.isSystemAuth() + && VIEW_RESTRICTED_ENTITY_TYPES.contains(urn.getEntityType())) { + + return opContext + .getViewAuthorizationContext() + .map( + viewAuthContext -> { + + // check cache + if (viewAuthContext.canView(Set.of(urn))) { + return true; + } + + if (!canViewEntity( + opContext.getSessionAuthentication().getActor().toUrnStr(), + opContext.getAuthorizerContext().getAuthorizer(), + urn)) { + return false; + } + + // cache viewable urn + viewAuthContext.addViewableUrns(Set.of(urn)); + return true; + }) + .orElse(false); + } + return true; } - public static boolean isAuthorized( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { - return AuthUtil.isAuthorized(authorizer, actor, 
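The canView helper above checks a per-request cache of already-cleared URNs before falling back to the authorizer, and only caches positive decisions. A stripped-down, self-contained sketch of that check-cache-then-authorize pattern; the ViewContext class and the Predicate-based authorizer are stand-ins, not the actual OperationContext or ViewAuthorizationContext API.

import java.util.HashSet;
import java.util.Set;
import java.util.function.Predicate;

public class ViewCacheExample {
  /** Stand-in for the view-authorization context: remembers URNs already cleared for viewing. */
  static class ViewContext {
    private final Set<String> viewableUrns = new HashSet<>();

    boolean canView(String urn) {
      return viewableUrns.contains(urn);
    }

    void addViewableUrn(String urn) {
      viewableUrns.add(urn);
    }
  }

  /** Check the cache first; only consult the (possibly expensive) authorizer on a miss. */
  static boolean canView(ViewContext ctx, Predicate<String> authorizer, String urn) {
    if (ctx.canView(urn)) {
      return true;            // cache hit: already authorized earlier in this request
    }
    if (!authorizer.test(urn)) {
      return false;           // denied: negative results are not cached
    }
    ctx.addViewableUrn(urn);  // cache the positive decision for later checks
    return true;
  }

  public static void main(String[] args) {
    ViewContext ctx = new ViewContext();
    Predicate<String> authorizer = urn -> urn.startsWith("urn:li:dataset");
    System.out.println(canView(ctx, authorizer, "urn:li:dataset:abc")); // true, now cached
    System.out.println(canView(ctx, authorizer, "urn:li:dashboard:x")); // false
  }
}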
Optional.empty(), privilegeGroup); + public static T restrictEntity(@Nonnull Object entity, Class clazz) { + List allFields = FieldUtils.getAllFieldsList(entity.getClass()); + try { + Object[] args = + allFields.stream() + .map( + field -> { + // properties are often not required but only because + // they are a `one of` non-null. + // i.e. ChartProperties or ChartEditableProperties are required. + if (field.getAnnotation(javax.annotation.Nonnull.class) != null + || field.getName().toLowerCase().contains("properties")) { + try { + switch (field.getName()) { + // pass through to the restricted entity + case "name": + case "type": + case "urn": + case "chartId": + case "id": + case "jobId": + case "flowId": + Method fieldGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return fieldGetter.invoke(entity, (Object[]) null); + default: + switch (field.getType().getSimpleName()) { + case "Boolean": + Method boolGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return boolGetter.invoke(entity, (Object[]) null); + // mask these fields in the restricted entity + case "String": + return ""; + case "Integer": + return 0; + case "Long": + return 0L; + case "Double": + return 0.0; + case "List": + return List.of(); + default: + if (Enum.class.isAssignableFrom(field.getType())) { + // pass through enum + Method enumGetter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + return enumGetter.invoke(entity, (Object[]) null); + } else if (entity + .getClass() + .getPackage() + .getName() + .contains(GRAPHQL_GENERATED_PACKAGE)) { + // handle nested fields recursively + Method getter = + MethodUtils.getMatchingMethod( + entity.getClass(), + "get" + StringUtils.capitalise(field.getName())); + Object nestedEntity = getter.invoke(entity, (Object[]) null); + if (nestedEntity == null) { + return null; + } else { + return restrictEntity(nestedEntity, getter.getReturnType()); + } + } + log.error( + String.format( + "Failed to resolve non-null field: Object:%s Field:%s FieldType: %s", + entity.getClass().getName(), + field.getName(), + field.getType().getName())); + } + } + } catch (IllegalAccessException | InvocationTargetException e) { + throw new RuntimeException(e); + } + } + return (Object) null; + }) + .toArray(); + return ConstructorUtils.invokeConstructor(clazz, args); + } catch (NoSuchMethodException + | IllegalAccessException + | InvocationTargetException + | InstantiationException e) { + throw new RuntimeException(e); + } } public static boolean isAuthorized( @@ -249,15 +376,16 @@ public static boolean isAuthorized( @Nonnull String resource, @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); - return AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); + return AuthUtil.isAuthorized(authorizer, actor, privilegeGroup, resourceSpec); } public static boolean isViewDatasetUsageAuthorized( - final Urn resourceUrn, final QueryContext context) { - return isAuthorized( - context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); + final QueryContext context, final Urn resourceUrn) { + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE, + new 
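restrictEntity above rebuilds a generated GraphQL object with most field values masked, passing identity fields such as urn and type through unchanged and recursing into nested generated types. A much-simplified illustration of the same masking idea over a plain POJO; the DatasetView class is hypothetical, and the real method works against the generated GraphQL classes via their all-args constructors rather than field mutation.

import java.lang.reflect.Field;
import java.util.Set;

public class RestrictEntityExample {
  /** Hypothetical view object standing in for a generated GraphQL type. */
  public static class DatasetView {
    public String urn;
    public String type;
    public String name;
    public String description;
    public Long rowCount;
  }

  private static final Set<String> PASS_THROUGH = Set.of("urn", "type", "id");

  /** Return a copy with identifying fields kept and everything else masked. */
  public static <T> T restrict(T entity, Class<T> clazz) throws Exception {
    T restricted = clazz.getDeclaredConstructor().newInstance();
    for (Field field : clazz.getDeclaredFields()) {
      field.setAccessible(true);
      Object value = field.get(entity);
      if (PASS_THROUGH.contains(field.getName())) {
        field.set(restricted, value);                 // keep identity fields
      } else if (field.getType() == String.class) {
        field.set(restricted, "");                    // mask free-text fields
      } else if (value instanceof Number) {
        field.set(restricted, 0L);                    // zero out numeric metrics
      }                                               // other fields stay null
    }
    return restricted;
  }

  public static void main(String[] args) throws Exception {
    DatasetView full = new DatasetView();
    full.urn = "urn:li:dataset:abc";
    full.type = "DATASET";
    full.name = "secret_table";
    full.rowCount = 42L;
    DatasetView masked = restrict(full, DatasetView.class);
    System.out.println(masked.urn + " / name='" + masked.name + "' / rows=" + masked.rowCount);
  }
}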
EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())); } private AuthorizationUtils() {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java deleted file mode 100644 index 9faf00e0211bd..0000000000000 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.linkedin.datahub.graphql.resolvers; - -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.authorization.PoliciesConfig; -import java.util.List; -import java.util.Optional; - -public class AuthUtils { - - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - - public static boolean isAuthorized( - String principal, List privilegeGroup, Authorizer authorizer) { - for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = - new AuthorizationRequest(principal, privilege, Optional.empty()); - final AuthorizationResult result = authorizer.authorize(request); - if (AuthorizationResult.Type.DENY.equals(result.getType())) { - return false; - } - } - return true; - } - - private AuthUtils() {} -} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index b480e287adb9b..30817d1c62152 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.datahub.authorization.AuthUtil.isAuthorized; +import static com.datahub.authorization.AuthUtil.isAuthorizedEntityType; import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.authorization.ApiGroup.ANALYTICS; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; +import static com.linkedin.metadata.authorization.ApiOperation.READ; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -22,7 +24,7 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Collections; -import java.util.Optional; +import java.util.List; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; @@ -59,7 +61,7 @@ public CompletableFuture get(DataFetchingEnvironment environm null, context.getAuthentication()) .get(userUrn); - final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); + final CorpUser corpUser = CorpUserMapper.map(context, gmsUser, _featureFlags); // 2. 
Get platform privileges final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); @@ -98,22 +100,22 @@ public CompletableFuture get(DataFetchingEnvironment environm /** Returns true if the authenticated user has privileges to view analytics. */ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized( - context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized(context.getActorUrn(), context.getAuthorizer(), ANALYTICS, READ); } /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized( - context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } /** Returns true if the authenticated user has privileges to manage users & groups. */ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized( - context.getAuthorizer(), + return isAuthorizedEntityType( context.getActorUrn(), - PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + context.getAuthorizer(), + MANAGE, + List.of(CORP_USER_ENTITY_NAME, CORP_GROUP_ENTITY_NAME)); } /** Returns true if the authenticated user has privileges to generate personal access tokens */ @@ -155,15 +157,4 @@ private boolean canManageUserCredentials(@Nonnull QueryContext context) { context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - - /** - * Returns true if the provided actor is authorized for a particular privilege, false otherwise. - */ - private boolean isAuthorized( - final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = - new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); - final AuthorizationResult result = authorizer.authorize(request); - return AuthorizationResult.Type.ALLOW.equals(result.getType()); - } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index 2a074b950d0ff..005fa6de22adf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -70,7 +70,9 @@ public CompletableFuture get(DataFetchingEnvironment e // Step 2: Bind profiles into GraphQL strong types. List runEvents = - aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + aspects.stream() + .map(a -> AssertionRunEventMapper.map(context, a)) + .collect(Collectors.toList()); // Step 3: Package and return response. 
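The MeResolver changes above express checks as an operation over a list of entity types (for example MANAGE over corpuser and corpGroup) instead of naming individual platform privileges. A toy model of that style of check, with a hand-rolled grant table standing in for the real AuthUtil and PoliciesConfig lookup.

import java.util.List;
import java.util.Map;
import java.util.Set;

public class ApiOperationCheckExample {
  enum ApiOperation { READ, CREATE, UPDATE, DELETE, MANAGE }

  // Toy grant table: which (entity type, operation) pairs the current actor holds.
  private static final Map<String, Set<ApiOperation>> ACTOR_GRANTS = Map.of(
      "corpuser", Set.of(ApiOperation.READ, ApiOperation.MANAGE),
      "corpGroup", Set.of(ApiOperation.READ, ApiOperation.MANAGE),
      "dataHubPolicy", Set.of(ApiOperation.READ));

  /** True only if the operation is granted for every requested entity type. */
  static boolean isAuthorizedEntityType(ApiOperation operation, List<String> entityTypes) {
    return entityTypes.stream()
        .allMatch(t -> ACTOR_GRANTS.getOrDefault(t, Set.of()).contains(operation));
  }

  public static void main(String[] args) {
    // Mirrors the canManageUsersGroups-style check: MANAGE over users and groups.
    System.out.println(isAuthorizedEntityType(ApiOperation.MANAGE, List.of("corpuser", "corpGroup"))); // true
    System.out.println(isAuthorizedEntityType(ApiOperation.MANAGE, List.of("dataHubPolicy")));         // false
  }
}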
final AssertionRunEventsResult result = new AssertionRunEventsResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index cbf685e9f45bd..b1924ef025c2d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -8,7 +10,6 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -104,7 +105,7 @@ private boolean isAuthorizedToDeleteAssertionFromAssertee( final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, + ALL_PRIVILEGES_GROUP, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index 9814589df7651..3cfcc299e774d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -83,7 +83,7 @@ public CompletableFuture get(DataFetchingEnvironment env final List assertions = gmsResults.stream() .filter(Objects::nonNull) - .map(AssertionMapper::map) + .map(r -> AssertionMapper.map(context, r)) .collect(Collectors.toList()); // Step 4: Package and return result diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index b40c6a3fd0f78..5443525e52cf1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -30,6 +30,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -53,7 +54,7 @@ public CompletableFuture get(DataFetchingEnvironment environmen final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; final String query = input.getQuery() != null ? 
input.getQuery() : "*"; - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); @@ -86,7 +87,7 @@ public CompletableFuture get(DataFetchingEnvironment environmen sanitizedQuery, start, count); - return mapBrowseResults(browseResults); + return mapBrowseResults(context, browseResults); } catch (Exception e) { throw new RuntimeException("Failed to execute browse V2", e); } @@ -105,7 +106,8 @@ public static List getEntityNames(BrowseV2Input input) { return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); } - private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { + private BrowseResultsV2 mapBrowseResults( + @Nullable QueryContext context, BrowseResultV2 browseResults) { BrowseResultsV2 results = new BrowseResultsV2(); results.setTotal(browseResults.getNumGroups()); results.setStart(browseResults.getFrom()); @@ -121,7 +123,7 @@ private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { browseGroup.setCount(group.getCount()); browseGroup.setHasSubGroups(group.isHasSubGroups()); if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); + browseGroup.setEntity(UrnToEntityMapper.map(context, group.getUrn())); } groups.add(browseGroup); }); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index b757a8aa2aab9..f9cb75052dcc4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -78,6 +78,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro .setValue(urn); return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( context.getOperationContext(), CONTAINABLE_ENTITY_NAMES, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 9502fb8e5cb93..82fbc8ad41268 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -48,7 +48,7 @@ private void aggregateParentContainers( _entityClient.getV2( containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); if (response != null) { - Container mappedContainer = ContainerMapper.map(response); + Container mappedContainer = ContainerMapper.map(context, response); containers.add(mappedContainer); aggregateParentContainers(containers, mappedContainer.getUrn(), context); } @@ -70,8 +70,11 @@ public CompletableFuture get(DataFetchingEnvironment env try { aggregateParentContainers(containers, urn, context); final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); + + List viewable = new ArrayList<>(containers); 
+ + result.setCount(viewable.size()); + result.setContainers(viewable); return result; } catch (DataHubGraphQLException e) { throw new RuntimeException("Failed to load all containers", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index 97a5793e33ff0..717360e58e184 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -18,6 +18,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -44,7 +45,7 @@ public CompletableFuture get(DataFetchingEnvironment envi try { // TODO: We don't have a dashboard specific priv - if (!isViewDatasetUsageAuthorized(resourceUrn, context)) { + if (!isViewDatasetUsageAuthorized(context, resourceUrn)) { log.debug( "User {} is not authorized to view usage information for {}", context.getActorUrn(), @@ -57,9 +58,9 @@ public CompletableFuture get(DataFetchingEnvironment envi // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + context, resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); + result.setViewCount(getDashboardViewCount(context, resourceUrn)); } // Obtain unique user statistics, by rolling up unique users over the past month. 
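The hunks above and below show the convention this patch applies throughout the GraphQL layer: helpers and mappers take the caller's QueryContext (usually @Nullable) as their first argument so that view authorization can be evaluated where the response is assembled. A minimal sketch of that shape for a dashboard stats resolver body follows; only QueryContext, isViewDatasetUsageAuthorized, getDashboardViewCount, DashboardUsageMetrics and DashboardUsageMetricMapper come from the surrounding hunks, while environment, resourceUrn, result and aspects are assumed to exist in the enclosing resolver.

    // Sketch only, not part of the patch: context-first helpers as used above.
    final QueryContext context = environment.getContext();
    if (!isViewDatasetUsageAuthorized(context, resourceUrn)) {
      return null; // illustrative: viewers without the usage privilege get no summary
    }
    result.setViewCount(getDashboardViewCount(context, resourceUrn));
    final List<DashboardUsageMetrics> metrics =
        aspects.stream()
            .map(a -> DashboardUsageMetricMapper.map(context, a))
            .collect(Collectors.toList());
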
@@ -84,10 +85,10 @@ public CompletableFuture get(DataFetchingEnvironment envi }); } - private int getDashboardViewCount(final Urn resourceUrn) { + private int getDashboardViewCount(@Nullable QueryContext context, final Urn resourceUrn) { List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + context, resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } @@ -99,7 +100,7 @@ private List getDashboardUsagePerUser(final Urn resour return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); } - private List trimUsers(final List originalUsers) { + private static List trimUsers(final List originalUsers) { if (originalUsers.size() > MAX_TOP_USERS) { return originalUsers.subList(0, MAX_TOP_USERS); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 07d028b07b01d..2dd4654e88466 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -4,6 +4,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUsageQueryResult; @@ -26,6 +27,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** @@ -46,6 +48,7 @@ public DashboardUsageStatsResolver(TimeseriesAspectService timeseriesAspectServi @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); @@ -70,14 +73,18 @@ public CompletableFuture get(DataFetchingEnvironment // Absolute usage metrics List dashboardUsageMetrics = getDashboardUsageMetrics( - dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + context, dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); usageQueryResult.setMetrics(dashboardUsageMetrics); return usageQueryResult; }); } private List getDashboardUsageMetrics( - String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, Integer maybeLimit) { + @Nullable QueryContext context, + String dashboardUrn, + Long maybeStartTimeMillis, + Long maybeEndTimeMillis, + Integer maybeLimit) { List dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -104,7 +111,9 @@ private List getDashboardUsageMetrics( maybeLimit, filter); dashboardUsageMetrics = - aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + aspects.stream() + .map(a -> DashboardUsageMetricMapper.map(context, a)) + .collect(Collectors.toList()); } catch (URISyntaxException e) { throw new 
IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 4f170a296c47e..e54ae184c91ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -3,6 +3,7 @@ import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregationMetrics; @@ -31,6 +32,7 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class DashboardUsageStatsUtils { @@ -40,6 +42,7 @@ public class DashboardUsageStatsUtils { public static final String ES_NULL_VALUE = "NULL"; public static List getDashboardUsageMetrics( + @Nullable QueryContext context, String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, @@ -58,7 +61,9 @@ public static List getDashboardUsageMetrics( maybeLimit, filter); dashboardUsageMetrics = - aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + aspects.stream() + .map(m -> DashboardUsageMetricMapper.map(context, m)) + .collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index 8ac7b2c3ce375..a6d6e9debccec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -56,7 +56,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); if (response != null) { - return DataProductMapper.map(response); + return DataProductMapper.map(context, response); } // should never happen log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index 29729a8799e79..ee99adc01fccb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -137,7 +137,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != 
null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } else { searchFlags = null; } @@ -152,6 +152,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) count); return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( context .getOperationContext() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java index 304ef96d90aa5..71d562048a8e9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java @@ -62,7 +62,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication); if (response != null) { - return DataProductMapper.map(response); + return DataProductMapper.map(context, response); } // should never happen log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java index 066d4f9bf8754..37ab6ac4575b4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java @@ -44,7 +44,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro return CompletableFuture.supplyAsync( () -> { try { - if (!AuthorizationUtils.isViewDatasetUsageAuthorized(resourceUrn, context)) { + if (!AuthorizationUtils.isViewDatasetUsageAuthorized(context, resourceUrn)) { log.debug( "User {} is not authorized to view profile information for dataset {}", context.getActorUrn(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index 03d08024eb73b..b2348eb2d8f4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -34,7 +34,7 @@ public CompletableFuture get(DataFetchingEnvironment environme return CompletableFuture.supplyAsync( () -> { - if (!isViewDatasetUsageAuthorized(resourceUrn, context)) { + if (!isViewDatasetUsageAuthorized(context, resourceUrn)) { log.debug( "User {} is not authorized to view usage information for dataset {}", context.getActorUrn(), @@ -44,7 +44,7 @@ public CompletableFuture get(DataFetchingEnvironment environme try { com.linkedin.usage.UsageQueryResult usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range); - return UsageQueryResultMapper.map(usageQueryResult); + return UsageQueryResultMapper.map(context, usageQueryResult); } catch (Exception e) { log.error(String.format("Failed to load Usage Stats for resource %s", 
resourceUrn), e); MetricUtils.counter(this.getClass(), "usage_stats_dropped").inc(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index e251e36a3e15f..ed3eca0b78b60 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.metadata.Constants.*; @@ -13,7 +14,6 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -89,7 +89,7 @@ private boolean isAuthorizedToUpdateDeprecationForEntity( final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, + ALL_PRIVILEGES_GROUP, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 829b2f903833d..905c992a0b65f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -85,6 +85,7 @@ public CompletableFuture get(final DataFetchingEnvironment enviro } return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( context.getOperationContext(), SEARCHABLE_ENTITY_TYPES.stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java index 8406e19810468..3478cffb032b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canViewRelationship; import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; import com.linkedin.common.urn.Urn; @@ -16,6 +17,7 @@ import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; public class ParentDomainsResolver implements DataFetcher> { @@ -50,9 +52,20 @@ public CompletableFuture get(DataFetchingEnvironment enviro 
Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } + List viewable = + parentDomains.stream() + .filter( + e -> + context == null + || canViewRelationship( + context.getOperationContext(), + UrnUtils.getUrn(e.getUrn()), + urn)) + .collect(Collectors.toList()); + final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); + result.setCount(viewable.size()); + result.setDomains(viewable); return result; } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index 92f090946db93..82a9b6a939e6d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.entity; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; + +import com.datahub.authorization.AuthUtil; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,10 +15,9 @@ import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Collections; +import java.util.List; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; @@ -102,22 +103,8 @@ private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext con } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - DisjunctivePrivilegeGroup orPrivilegesGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup( - Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - urn.getEntityType(), - urn.toString(), - orPrivilegesGroup); + return AuthUtil.isAuthorizedUrns( + context.getAuthorizer(), context.getActorUrn(), LINEAGE, UPDATE, List.of(urn)); } private EntityPrivileges getDatasetPrivileges(Urn urn, QueryContext context) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index 850469f996515..1ac5ced304dbe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,10 +1,12 @@ package 
com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canViewRelationship; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -23,6 +25,7 @@ import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; public class ParentNodesResolver implements DataFetcher> { @@ -53,7 +56,7 @@ private void aggregateParentNodes(List nodes, String urn, QueryCon _entityClient.getV2( parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { - GlossaryNode mappedNode = GlossaryNodeMapper.map(response); + GlossaryNode mappedNode = GlossaryNodeMapper.map(context, response); nodes.add(mappedNode); aggregateParentNodes(nodes, mappedNode.getUrn(), context); } @@ -85,7 +88,7 @@ private GlossaryNode getTermParentNode(String urn, QueryContext context) { _entityClient.getV2( parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { - GlossaryNode mappedNode = GlossaryNodeMapper.map(response); + GlossaryNode mappedNode = GlossaryNodeMapper.map(context, response); return mappedNode; } } @@ -117,9 +120,20 @@ public CompletableFuture get(DataFetchingEnvironment environm aggregateParentNodes(nodes, urn, context); } + List viewable = + nodes.stream() + .filter( + e -> + context == null + || canViewRelationship( + context.getOperationContext(), + UrnUtils.getUrn(e.getUrn()), + UrnUtils.getUrn(urn))) + .collect(Collectors.toList()); + final ParentNodesResult result = new ParentNodesResult(); - result.setCount(nodes.size()); - result.setNodes(nodes); + result.setCount(viewable.size()); + result.setNodes(viewable); return result; } catch (DataHubGraphQLException | URISyntaxException e) { throw new RuntimeException(("Failed to load parent nodes")); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java index c0c3217fd056d..089582b66836f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/EntityIncidentsResolver.java @@ -90,7 +90,7 @@ public CompletableFuture get(DataFetchingEnvironment envi final List incidents = entityResult.stream() .filter(Objects::nonNull) - .map(IncidentMapper::map) + .map(i -> IncidentMapper.map(context, i)) .collect(Collectors.toList()); // Step 4: Package and return result diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java index 2314b3fab5b4a..117ee59553d4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/RaiseIncidentResolver.java @@ -1,6 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.incident; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; import static com.linkedin.metadata.Constants.*; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java index c9d3c23021d38..c88847c13accd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/incident/UpdateIncidentStatusResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.incident; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; import static com.linkedin.metadata.Constants.*; @@ -16,7 +17,6 @@ import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.UpdateIncidentStatusInput; -import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.incident.IncidentInfo; import com.linkedin.incident.IncidentState; @@ -92,7 +92,7 @@ private boolean isAuthorizedToUpdateIncident(final Urn resourceUrn, final QueryC final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, + ALL_PRIVILEGES_GROUP, new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.EDIT_ENTITY_INCIDENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 036780d446701..24d0e94614505 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,29 +1,28 @@ package com.linkedin.datahub.graphql.resolvers.ingest; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.datahub.authorization.AuthUtil.isAuthorizedEntityType; +import static com.linkedin.metadata.Constants.INGESTION_SOURCE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.SECRETS_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthUtil; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.metadata.authorization.PoliciesConfig; +import java.util.List; import javax.annotation.Nonnull; public class 
IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized( - principal, - ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), - authorizer); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), + context.getAuthorizer(), + MANAGE, + List.of(INGESTION_SOURCE_ENTITY_NAME)); } public static boolean canManageSecrets(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized( - principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(SECRETS_ENTITY_NAME)); } private IngestionAuthUtils() {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index ffa9dcf42d176..800a41330346a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ExecutionRequest; import com.linkedin.datahub.graphql.generated.IngestionConfig; import com.linkedin.datahub.graphql.generated.IngestionSchedule; @@ -23,21 +24,23 @@ import java.util.Collection; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j public class IngestionResolverUtils { public static List mapExecutionRequests( - final Collection requests) { + @Nullable QueryContext context, final Collection requests) { List result = new ArrayList<>(); for (final EntityResponse request : requests) { - result.add(mapExecutionRequest(request)); + result.add(mapExecutionRequest(context, request)); } return result; } - public static ExecutionRequest mapExecutionRequest(final EntityResponse entityResponse) { + public static ExecutionRequest mapExecutionRequest( + @Nullable QueryContext context, final EntityResponse entityResponse) { final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -59,7 +62,7 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe inputResult.setSource(mapExecutionRequestSource(executionRequestInput.getSource())); } if (executionRequestInput.hasArgs()) { - inputResult.setArguments(StringMapMapper.map(executionRequestInput.getArgs())); + inputResult.setArguments(StringMapMapper.map(context, executionRequestInput.getArgs())); } inputResult.setRequestedAt(executionRequestInput.getRequestedAt()); result.setInput(inputResult); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 722ffe3aba6b8..4975b03639927 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -58,7 +58,7 @@ public CompletableFuture get(final DataFetchingEnvironment env DataHubGraphQLErrorCode.NOT_FOUND); } // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); + return IngestionResolverUtils.mapExecutionRequest(context, entities.get(urn)); } catch (Exception e) { throw new RuntimeException("Failed to retrieve execution request", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index 4c8a06e2d585a..e3e4b692a45da 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -103,6 +103,7 @@ public CompletableFuture get( result.setTotal(executionsSearchResult.getNumEntities()); result.setExecutionRequests( IngestionResolverUtils.mapExecutionRequests( + context, executionsSearchResult.getEntities().stream() .map(searchResult -> entities.get(searchResult.getEntity())) .filter(Objects::nonNull) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index f5e7cf4d69ce8..750e048e39ce6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -13,12 +13,12 @@ import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.key.DataHubSecretKey; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.concurrent.CompletableFuture; /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 67564aa721bda..0205601d5ccda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -13,10 +13,10 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.secret.DataHubSecretValue; import 
graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.HashSet; import java.util.List; import java.util.Map; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java index 20a685265b545..e3ad036159060 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolver.java @@ -12,11 +12,11 @@ import com.linkedin.datahub.graphql.types.ingest.secret.mapper.DataHubSecretValueMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.Set; import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index 325e804327b72..64f8f083a164e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -90,7 +90,7 @@ public CompletableFuture get(DataFetchingEnvironment final List dataProcessInstances = gmsResults.stream() .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) + .map(p -> DataProcessInstanceMapper.map(context, p)) .collect(Collectors.toList()); // Step 4: Package and return result diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 1c6fa352fecb6..660484cc27e15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -96,7 +96,7 @@ public CompletableFuture get(DataFetchingEnvironment final List dataProcessInstances = gmsResults.stream() .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) + .map(p -> DataProcessInstanceMapper.map(context, p)) .collect(Collectors.toList()); // Step 4: Package and return result diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index 804bd6ca05431..020688765cc53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,10 +1,11 @@ package com.linkedin.datahub.graphql.resolvers.lineage; +import static 
com.datahub.authorization.AuthUtil.buildDisjunctivePrivilegeGroup; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; -import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -13,13 +14,11 @@ import com.linkedin.datahub.graphql.generated.LineageEdge; import com.linkedin.datahub.graphql.generated.UpdateLineageInput; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -244,15 +243,9 @@ private void checkPrivileges( @Nonnull final QueryContext context, @Nonnull final List edgesToAdd, @Nonnull final List edgesToRemove) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup editLineagePrivileges = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup( - Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); + buildDisjunctivePrivilegeGroup(LINEAGE, UPDATE, null); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index 6f56bfed94240..6c6a98c1ac058 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -15,11 +15,10 @@ import com.linkedin.datahub.graphql.generated.LineageRelationship; import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import com.linkedin.metadata.Constants; import com.linkedin.metadata.graph.SiblingGraphService; -import com.linkedin.metadata.service.RestrictedService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.RestrictedService; import java.util.HashSet; import java.util.Set; import java.util.concurrent.CompletableFuture; @@ -55,10 +54,6 @@ public CompletableFuture get(DataFetchingEnvironment enviro Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); - if (urn.getEntityType().equals(Constants.RESTRICTED_ENTITY_NAME)) { - urn = _restrictedService.decryptRestrictedUrn(urn); - } - final LineageDirection lineageDirection = input.getDirection(); @Nullable final Integer start = input.getStart(); // Optional! 
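The EntityLineageResultResolver hunks continuing below collect lineage edges the actor may not view into restrictedUrns and surface them as Restricted placeholders during mapping. The per-edge test combines the view-authorization flag with the new canViewRelationship helper. A sketch of that test factored into a named predicate (the method itself is hypothetical; the configuration flag and the helper appear verbatim in the hunk below):

    // Sketch only: the visibility check applied to each lineage relationship.
    private boolean isEdgeViewable(final QueryContext context, final Urn sourceUrn, final Urn targetUrn) {
      // When view authorization is disabled, no edges are restricted.
      return !_authorizationConfiguration.getView().isEnabled()
          || AuthorizationUtils.canViewRelationship(
              context.getOperationContext(), targetUrn, sourceUrn);
    }
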
@Nullable final Integer count = input.getCount(); // Optional! @@ -90,13 +85,14 @@ public CompletableFuture get(DataFetchingEnvironment enviro .getRelationships() .forEach( rel -> { - if (_authorizationConfiguration.getSearch().isEnabled() - && !AuthorizationUtils.canViewEntity(rel.getEntity(), context)) { + if (_authorizationConfiguration.getView().isEnabled() + && !AuthorizationUtils.canViewRelationship( + context.getOperationContext(), rel.getEntity(), urn)) { restrictedUrns.add(rel.getEntity()); } }); - return mapEntityRelationships(entityLineageResult, restrictedUrns); + return mapEntityRelationships(context, entityLineageResult, restrictedUrns); } catch (Exception e) { log.error("Failed to fetch lineage for {}", finalUrn); throw new RuntimeException( @@ -106,6 +102,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro } private EntityLineageResult mapEntityRelationships( + @Nullable final QueryContext context, final com.linkedin.metadata.graph.EntityLineageResult entityLineageResult, final Set restrictedUrns) { final EntityLineageResult result = new EntityLineageResult(); @@ -115,12 +112,13 @@ private EntityLineageResult mapEntityRelationships( result.setFiltered(entityLineageResult.getFiltered()); result.setRelationships( entityLineageResult.getRelationships().stream() - .map(r -> mapEntityRelationship(r, restrictedUrns)) + .map(r -> mapEntityRelationship(context, r, restrictedUrns)) .collect(Collectors.toList())); return result; } private LineageRelationship mapEntityRelationship( + @Nullable final QueryContext context, final com.linkedin.metadata.graph.LineageRelationship lineageRelationship, final Set restrictedUrns) { final LineageRelationship result = new LineageRelationship(); @@ -133,7 +131,7 @@ private LineageRelationship mapEntityRelationship( restrictedEntity.setUrn(restrictedUrnString); result.setEntity(restrictedEntity); } else { - final Entity partialEntity = UrnToEntityMapper.map(lineageRelationship.getEntity()); + final Entity partialEntity = UrnToEntityMapper.map(context, lineageRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); } @@ -145,14 +143,14 @@ private LineageRelationship mapEntityRelationship( } if (lineageRelationship.hasCreatedActor()) { final Urn createdActor = lineageRelationship.getCreatedActor(); - result.setCreatedActor(UrnToEntityMapper.map(createdActor)); + result.setCreatedActor(UrnToEntityMapper.map(context, createdActor)); } if (lineageRelationship.hasUpdatedOn()) { result.setUpdatedOn(lineageRelationship.getUpdatedOn()); } if (lineageRelationship.hasUpdatedActor()) { final Urn updatedActor = lineageRelationship.getUpdatedActor(); - result.setUpdatedActor(UrnToEntityMapper.map(updatedActor)); + result.setUpdatedActor(UrnToEntityMapper.map(context, updatedActor)); } result.setIsManual(lineageRelationship.hasIsManual() && lineageRelationship.isIsManual()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 223548d5d6242..f3edbf8a3a737 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static 
com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import com.linkedin.common.EntityRelationship; @@ -17,6 +18,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; /** * GraphQL Resolver responsible for fetching relationships between entities in the DataHub graph. @@ -47,6 +49,7 @@ public CompletableFuture get(DataFetchingEnvironment return CompletableFuture.supplyAsync( () -> mapEntityRelationships( + context, fetchEntityRelationships( urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), resolvedDirection)); @@ -64,17 +67,28 @@ private EntityRelationships fetchEntityRelationships( } private EntityRelationshipsResult mapEntityRelationships( + @Nullable final QueryContext context, final EntityRelationships entityRelationships, final RelationshipDirection relationshipDirection) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); + + List viewable = + entityRelationships.getRelationships().stream() + .filter( + rel -> context == null || canView(context.getOperationContext(), rel.getEntity())) + .collect(Collectors.toList()); + result.setStart(entityRelationships.getStart()); - result.setCount(entityRelationships.getCount()); - result.setTotal(entityRelationships.getTotal()); + result.setCount(viewable.size()); + // TODO fix the calculation at the graph call + result.setTotal( + entityRelationships.getTotal() - (entityRelationships.getCount() - viewable.size())); result.setRelationships( - entityRelationships.getRelationships().stream() + viewable.stream() .map( entityRelationship -> mapEntityRelationship( + context, com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( relationshipDirection.name()), entityRelationship)) @@ -83,18 +97,19 @@ private EntityRelationshipsResult mapEntityRelationships( } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( + @Nullable final QueryContext context, final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final EntityRelationship entityRelationship) { final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); - final Entity partialEntity = UrnToEntityMapper.map(entityRelationship.getEntity()); + final Entity partialEntity = UrnToEntityMapper.map(context, entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); } result.setType(entityRelationship.getType()); result.setDirection(direction); if (entityRelationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(entityRelationship.getCreated())); + result.setCreated(AuditStampMapper.map(context, entityRelationship.getCreated())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index 0d00823697c25..f233209cda67e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -2,9 +2,9 @@ import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.datahub.authorization.AuthUtil; import 
com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.FilterInput; import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; @@ -22,9 +22,8 @@ import graphql.schema.DataFetchingEnvironment; import java.util.Collections; import java.util.List; -import java.util.Optional; import java.util.concurrent.CompletableFuture; -import java.util.function.Function; +import java.util.function.BiFunction; import java.util.stream.Collectors; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -48,14 +47,14 @@ public class TimeSeriesAspectResolver private final EntityClient _client; private final String _entityName; private final String _aspectName; - private final Function _aspectMapper; + private final BiFunction _aspectMapper; private final SortCriterion _sort; public TimeSeriesAspectResolver( final EntityClient client, final String entityName, final String aspectName, - final Function aspectMapper) { + final BiFunction aspectMapper) { this(client, entityName, aspectName, aspectMapper, null); } @@ -63,7 +62,7 @@ public TimeSeriesAspectResolver( final EntityClient client, final String entityName, final String aspectName, - final Function aspectMapper, + final BiFunction aspectMapper, final SortCriterion sort) { _client = client; _entityName = entityName; @@ -76,10 +75,11 @@ public TimeSeriesAspectResolver( private boolean isAuthorized(QueryContext context, String urn) { if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized( - context, - Optional.of(new EntitySpec(_entityName, urn)), - PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE, + new EntitySpec(_entityName, urn)); } return true; } @@ -123,7 +123,9 @@ public CompletableFuture> get(DataFetchingEnvironment env context.getAuthentication()); // Step 2: Bind profiles into GraphQL strong types. 
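The TimeSeriesAspectResolver hunks above swap the aspect mapper from a Function to a BiFunction that also receives the QueryContext, so registrations now pass a two-argument lambda; the call that applies the mapper follows just below. A sketch of such a wiring, assuming DatasetProfileMapper follows the same context-first map(context, aspect) convention as the other mappers touched by this patch, and that an EntityClient named entityClient is available at the call site:

    // Sketch only: constructing the resolver with the new BiFunction-based mapper.
    new TimeSeriesAspectResolver(
        entityClient,
        Constants.DATASET_ENTITY_NAME,
        Constants.DATASET_PROFILE_ASPECT_NAME,
        (context, envelopedAspect) -> DatasetProfileMapper.map(context, envelopedAspect));
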
- return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); + return aspects.stream() + .map(a -> _aspectMapper.apply(context, a)) + .collect(Collectors.toList()); } catch (RemoteInvocationException e) { throw new RuntimeException("Failed to retrieve aspects from GMS", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 3fffe9fa019e7..7f9be9eb2706c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,17 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthUtil; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import com.linkedin.mxe.MetadataChangeProposal; @@ -22,26 +19,12 @@ @Slf4j public class DeleteUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups); + return AuthUtil.isAuthorizedEntityUrns( + context.getAuthorizer(), context.getActorUrn(), DELETE, List.of(entityUrn)); } public static void updateStatusForResources( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 29447e6e7ef22..8c0a3ef7a95a6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -288,7 +288,7 @@ public static Entity getParentDomain( new DomainProperties( entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); - return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; + return parentDomainUrn != null ? 
UrnToEntityMapper.map(context, parentDomainUrn) : null; } } catch (Exception e) { throw new RuntimeException( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 996bd3da120d6..ea487f3a94090 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -16,7 +17,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; import java.net.URISyntaxException; -import java.util.Optional; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -32,8 +32,8 @@ private GlossaryUtils() {} * Nodes. */ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized( - context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthUtil.isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java index abc479ed18ebf..a51714123057d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java @@ -1,6 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.operation; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.ALL_PRIVILEGES_GROUP; import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; import static com.linkedin.metadata.Constants.*; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java index 839121a295d9a..4a5568619a5cf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java @@ -16,6 +16,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -50,7 +51,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro context.getAuthentication(), System.currentTimeMillis()); log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); - return 
getOwnershipType(urn, context.getAuthentication()); + return getOwnershipType(context, urn, context.getAuthentication()); } catch (AuthorizationException e) { throw e; } catch (Exception e) { @@ -61,7 +62,9 @@ public CompletableFuture get(DataFetchingEnvironment enviro } private OwnershipTypeEntity getOwnershipType( - @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + @Nullable QueryContext context, + @Nonnull final Urn urn, + @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); // If there is no response, there is a problem. @@ -71,6 +74,6 @@ private OwnershipTypeEntity getOwnershipType( "Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", urn)); } - return OwnershipTypeMapper.map(maybeResponse); + return OwnershipTypeMapper.map(context, maybeResponse); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index 1d26aab85f463..4120401e0150f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -21,6 +21,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -68,7 +69,7 @@ public CompletableFuture get(final DataFetchingEnvironment e result.setStart(start); result.setCount(count); result.setTotal(policyFetchResult.getTotal()); - result.setPolicies(mapEntities(policyFetchResult.getPolicies())); + result.setPolicies(mapEntities(context, policyFetchResult.getPolicies())); return result; }); } @@ -76,11 +77,12 @@ public CompletableFuture get(final DataFetchingEnvironment e "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private List mapEntities(final List policies) { + private static List mapEntities( + @Nullable QueryContext context, final List policies) { return policies.stream() .map( policy -> { - Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); + Policy mappedPolicy = PolicyInfoPolicyMapper.map(context, policy.getPolicyInfo()); mappedPolicy.setUrn(policy.getUrn().toString()); return mappedPolicy; }) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java index d0446d218dac6..7babe63745f72 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java @@ -1,22 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.policy; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.metadata.Constants.POLICY_ENTITY_NAME; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthUtil; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.metadata.authorization.PoliciesConfig; +import java.util.List; import javax.annotation.Nonnull; public class PolicyAuthUtils { static boolean canManagePolicies(@Nonnull QueryContext context) { - final Authorizer authorizer = context.getAuthorizer(); - final String principal = context.getActorUrn(); - return isAuthorized( - principal, - ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), - authorizer); + return AuthUtil.isAuthorizedEntityType( + context.getActorUrn(), context.getAuthorizer(), MANAGE, List.of(POLICY_ENTITY_NAME)); } private PolicyAuthUtils() {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java index dcdf78ebc15bb..829aa0024c23a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java @@ -43,7 +43,7 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal; - final DataHubPolicyInfo info = PolicyUpdateInputInfoMapper.map(input); + final DataHubPolicyInfo info = PolicyUpdateInputInfoMapper.map(context, input); info.setLastUpdatedTimestamp(System.currentTimeMillis()); if (policyUrn.isPresent()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java index a350fb91f9d3b..2eaa08069a688 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.Policy; import com.linkedin.datahub.graphql.generated.PolicyMatchCondition; @@ -19,6 +20,7 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link @@ -28,12 +30,13 @@ public class PolicyInfoPolicyMapper implements ModelMapper mapValue(context, v)) .collect(Collectors.toList())) .setCondition( PolicyMatchCondition.valueOf(criterion.getCondition().name())) @@ -108,13 +113,14 @@ private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter .build(); } - private PolicyMatchCriterionValue mapValue(final String value) { + private static PolicyMatchCriterionValue mapValue( + @Nullable QueryContext context, final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); return PolicyMatchCriterionValue.builder() .setValue(value) - .setEntity(UrnToEntityMapper.map(urn)) + .setEntity(UrnToEntityMapper.map(context, urn)) .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java index d82d71295d41b..0397f764f61bd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java @@ -3,6 +3,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilterInput; import com.linkedin.datahub.graphql.generated.PolicyMatchFilterInput; import com.linkedin.datahub.graphql.generated.PolicyUpdateInput; @@ -18,6 +19,7 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. 
*/ public class PolicyUpdateInputInfoMapper @@ -25,12 +27,14 @@ public class PolicyUpdateInputInfoMapper public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper(); - public static DataHubPolicyInfo map(@Nonnull final PolicyUpdateInput policyInput) { - return INSTANCE.apply(policyInput); + public static DataHubPolicyInfo map( + @Nullable QueryContext context, @Nonnull final PolicyUpdateInput policyInput) { + return INSTANCE.apply(context, policyInput); } @Override - public DataHubPolicyInfo apply(@Nonnull final PolicyUpdateInput policyInput) { + public DataHubPolicyInfo apply( + @Nullable QueryContext queryContext, @Nonnull final PolicyUpdateInput policyInput) { final DataHubPolicyInfo result = new DataHubPolicyInfo(); result.setDescription(policyInput.getDescription()); result.setType(policyInput.getType().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 34ef616c61e41..49d2a3eff70f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -80,7 +80,9 @@ public CompletableFuture get(final DataFetchingEnvironment envi result.setCount(gmsResult.getPageSize()); result.setTotal(gmsResult.getNumEntities()); result.setPosts( - entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); + entities.values().stream() + .map(e -> PostMapper.map(context, e)) + .collect(Collectors.toList())); return result; } catch (Exception e) { throw new RuntimeException("Failed to list posts", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java index 48f31fb75d371..03e1d625c1e77 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java @@ -69,7 +69,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm .collect(Collectors.toList()), authentication, System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + return QueryMapper.map( + context, _queryService.getQueryEntityResponse(queryUrn, authentication)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to create a new Query from input %s", input), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index cc284aaf7b563..5d485d24866fc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -96,7 +96,8 @@ public CompletableFuture get(final DataFetchingEnvironment environm : null, authentication, System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + return QueryMapper.map( + context, 
_queryService.getQueryEntityResponse(queryUrn, authentication)); } catch (Exception e) { throw new RuntimeException( String.format("Failed to update Query from input %s", input), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index 86d1b8bab669c..c5c75d1e5c2c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -31,6 +31,7 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -63,7 +64,7 @@ public CompletableFuture get(DataFetchingEnvironment return ListRecommendationsResult.builder() .setModules( modules.stream() - .map(this::mapRecommendationModule) + .map(m -> mapRecommendationModule(context, m)) .filter(Optional::isPresent) .map(Optional::get) .collect(Collectors.toList())) @@ -123,6 +124,7 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq } private Optional mapRecommendationModule( + @Nullable QueryContext context, com.linkedin.metadata.recommendation.RecommendationModule module) { RecommendationModule mappedModule = new RecommendationModule(); mappedModule.setTitle(module.getTitle()); @@ -136,17 +138,18 @@ private Optional mapRecommendationModule( } mappedModule.setContent( module.getContent().stream() - .map(this::mapRecommendationContent) + .map(c -> mapRecommendationContent(context, c)) .collect(Collectors.toList())); return Optional.of(mappedModule); } private RecommendationContent mapRecommendationContent( + @Nullable QueryContext context, com.linkedin.metadata.recommendation.RecommendationContent content) { RecommendationContent mappedContent = new RecommendationContent(); mappedContent.setValue(content.getValue()); if (content.hasEntity()) { - mappedContent.setEntity(UrnToEntityMapper.map(content.getEntity())); + mappedContent.setEntity(UrnToEntityMapper.map(context, content.getEntity())); } if (content.hasParams()) { mappedContent.setParams(mapRecommendationParams(content.getParams())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index 5c0ea6651f67e..42ec8ff28db7d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -23,6 +23,7 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -74,7 +75,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi result.setStart(gmsResult.getFrom()); result.setCount(gmsResult.getPageSize()); result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); + result.setRoles(mapEntitiesToRoles(context, entities.values())); return result; } catch (Exception e) { throw new 
RuntimeException("Failed to list roles", e); @@ -82,9 +83,10 @@ public CompletableFuture get(final DataFetchingEnvironment envi }); } - private List mapEntitiesToRoles(final Collection entities) { + private static List mapEntitiesToRoles( + @Nullable QueryContext context, final Collection entities) { return entities.stream() - .map(DataHubRoleMapper::map) + .map(e -> DataHubRoleMapper.map(context, e)) .sorted(Comparator.comparing(DataHubRole::getName)) .collect(Collectors.toList()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index 44b998219f01e..5aa59b19cb5bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -23,6 +23,7 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -62,13 +63,14 @@ public CompletableFuture get(DataFetchingEnvironment environme final Filter inputFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + final SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); final List facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; try { return mapAggregateResults( + context, _entityClient.searchAcrossEntities( context.getOperationContext().withSearchFlags(flags -> searchFlags), maybeResolvedView != null @@ -100,11 +102,12 @@ public CompletableFuture get(DataFetchingEnvironment environme }); } - AggregateResults mapAggregateResults(SearchResult searchResult) { + static AggregateResults mapAggregateResults( + @Nullable QueryContext context, SearchResult searchResult) { final AggregateResults results = new AggregateResults(); results.setFacets( searchResult.getMetadata().getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); return results; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 21007bf228a70..ea6e329ba1a39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -30,6 +30,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -63,8 +64,8 @@ public CompletableFuture get(final DataFetchingEnvironmen final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - quickFilters.addAll(getPlatformQuickFilters(aggregations)); - quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); + quickFilters.addAll(getPlatformQuickFilters(context, aggregations)); + 
quickFilters.addAll(getEntityTypeQuickFilters(context, aggregations)); } catch (Exception e) { log.error("Failed getting quick filters", e); throw new RuntimeException("Failed to to get quick filters", e); @@ -113,7 +114,7 @@ private SearchResult getSearchResults( * top 5 to quick filters */ private List getPlatformQuickFilters( - @Nonnull final AggregationMetadataArray aggregations) { + @Nullable QueryContext context, @Nonnull final AggregationMetadataArray aggregations) { final List platforms = new ArrayList<>(); final Optional platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); @@ -125,7 +126,7 @@ private List getPlatformQuickFilters( sortedPlatforms.forEach( platformFilter -> { if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { - platforms.add(mapQuickFilter(PLATFORM, platformFilter)); + platforms.add(mapQuickFilter(context, PLATFORM, platformFilter)); } }); } @@ -141,7 +142,7 @@ private List getPlatformQuickFilters( * filters from a prioritized list. Do the same for datathub entity types. */ private List getEntityTypeQuickFilters( - @Nonnull final AggregationMetadataArray aggregations) { + @Nullable QueryContext context, @Nonnull final AggregationMetadataArray aggregations) { final List entityTypes = new ArrayList<>(); final Optional entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); @@ -149,6 +150,7 @@ private List getEntityTypeQuickFilters( if (entityAggregations.isPresent()) { final List sourceEntityTypeFilters = getQuickFiltersFromList( + context, SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); @@ -156,6 +158,7 @@ private List getEntityTypeQuickFilters( final List dataHubEntityTypeFilters = getQuickFiltersFromList( + context, SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); @@ -169,6 +172,7 @@ private List getEntityTypeQuickFilters( * until we reach the maxListSize defined */ private List getQuickFiltersFromList( + @Nullable QueryContext context, @Nonnull final List prioritizedList, final int maxListSize, @Nonnull final AggregationMetadata entityAggregations) { @@ -181,7 +185,7 @@ private List getQuickFiltersFromList( .filter(val -> val.getValue().equals(entityType)) .findFirst(); if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); + entityTypes.add(mapQuickFilter(context, ENTITY_FILTER_NAME, entityFilter.get())); } } }); @@ -190,13 +194,15 @@ private List getQuickFiltersFromList( } private QuickFilter mapQuickFilter( - @Nonnull final String field, @Nonnull final FilterValue filterValue) { + @Nullable QueryContext context, + @Nonnull final String field, + @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); quickFilter.setValue(convertFilterValue(filterValue.getValue(), isEntityTypeFilter)); if (filterValue.getEntity() != null) { - final Entity entity = UrnToEntityMapper.map(filterValue.getEntity()); + final Entity entity = UrnToEntityMapper.map(context, filterValue.getEntity()); quickFilter.setEntity(entity); } return quickFilter; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index 79101c9b6a48f..e4c224c4c8401 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -75,7 +75,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } else { searchFlags = null; } @@ -91,6 +91,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; return UrnScrollResultsMapper.map( + context, _entityClient.scrollAcrossEntities( context .getOperationContext() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index fa82cad6fffde..d0b955276f704 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -106,6 +106,7 @@ public CompletableFuture get(DataFetchingEnvironment searchFlags = null; } return UrnScrollAcrossLineageResultsMapper.map( + context, _entityClient.scrollAcrossLineage( context .getOperationContext() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 6f4bcf937f2fe..0d7f217bb02db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -60,7 +60,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + SearchFlags searchFlags = mapInputFlags(context, input.getSearchFlags()); SortCriterion sortCriterion = input.getSortInput() != null ? 
mapSortCriterion(input.getSortInput().getSortCriterion()) @@ -76,6 +76,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) count); return UrnSearchResultsMapper.map( + context, _entityClient.searchAcrossEntities( context.getOperationContext().withSearchFlags(flags -> searchFlags), maybeResolvedView != null diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 9937dac2447f7..0d3430c9ab6a7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -131,7 +131,7 @@ public CompletableFuture get(DataFetchingEnvironment final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); if (inputFlags.getSkipHighlighting() == null) { searchFlags.setSkipHighlighting(true); } @@ -153,7 +153,7 @@ public CompletableFuture get(DataFetchingEnvironment startTimeMillis, endTimeMillis); - return UrnSearchAcrossLineageResultsMapper.map(salResults); + return UrnSearchAcrossLineageResultsMapper.map(context, salResults); } catch (RemoteInvocationException e) { log.error( "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 7d1b7d6ef2838..ed9838b7074c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -60,7 +60,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) final SearchFlags searchFlags; com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } else { searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } @@ -79,6 +79,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) searchFlags); return UrnSearchResultsMapper.map( + context, _entityClient.search( context.getOperationContext().withSearchFlags(flags -> searchFlags), entityName, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index 8c45df1b30b26..c9d23fd0263a1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -19,6 +19,7 @@ import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; +import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.types.common.mappers.SearchFlagsInputMapper; @@ -287,10 +288,11 @@ public static Integer getMaxHops(List filters) { } public static SearchFlags mapInputFlags( + @Nullable QueryContext context, com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { SearchFlags searchFlags = null; if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(context, inputFlags); } return searchFlags; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java index d440c5cf05d8f..7d204f2970158 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/structuredproperties/UpsertStructuredPropertiesResolver.java @@ -97,7 +97,7 @@ public CompletableFuture> { @@ -81,7 +82,7 @@ public CompletableFuture get(final DataFetchingEnvironment envi result.setStart(gmsResult.getFrom()); result.setCount(gmsResult.getPageSize()); result.setTotal(gmsResult.getNumEntities()); - result.setUsers(mapEntities(entities.values())); + result.setUsers(mapEntities(context, entities.values())); return result; } catch (Exception e) { throw new RuntimeException("Failed to list users", e); @@ -92,7 +93,8 @@ public CompletableFuture get(final DataFetchingEnvironment envi "Unauthorized to perform this action. Please contact your DataHub administrator."); } - private List mapEntities(final Collection entities) { - return entities.stream().map(CorpUserMapper::map).collect(Collectors.toList()); + private static List mapEntities( + @Nullable QueryContext context, final Collection entities) { + return entities.stream().map(e -> CorpUserMapper.map(context, e)).collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java index 5a52a57d9c374..4f209ad9472ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java @@ -16,6 +16,7 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** Resolver responsible for updating a particular DataHub View */ @@ -49,7 +50,7 @@ public CompletableFuture get(final DataFetchingEnvironment environm context.getAuthentication(), System.currentTimeMillis()); log.info(String.format("Successfully updated View %s with urn", urn)); - return getView(urn, context.getAuthentication()); + return getView(context, urn, context.getAuthentication()); } throw new AuthorizationException( "Unauthorized to perform this action. 
Please contact your DataHub administrator."); @@ -63,7 +64,9 @@ public CompletableFuture get(final DataFetchingEnvironment environm } private DataHubView getView( - @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + @Nullable QueryContext context, + @Nonnull final Urn urn, + @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { @@ -71,6 +74,6 @@ private DataHubView getView( String.format( "Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); } - return DataHubViewMapper.map(maybeResponse); + return DataHubViewMapper.map(context, maybeResponse); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index 3a676f118c1ac..e6078a22835f4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -80,9 +80,7 @@ public static boolean canUpdateView( } // If the View is Personal, then the current actor must be the owner. - return isViewOwner( - viewInfo.getCreated().getActor(), - UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getActorUrn())); } /** diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java index 00e9badf5e345..6e4259dde18c3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java @@ -1,23 +1,31 @@ package com.linkedin.datahub.graphql.types.aspect; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Aspect; import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaMetadataMapper; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AspectMapper { public static final AspectMapper INSTANCE = new AspectMapper(); - public static Aspect map(@Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(aspect, entityUrn); + public static Aspect map( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, aspect, entityUrn); } - public Aspect apply(@Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + public Aspect apply( + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { if (Constants.SCHEMA_METADATA_ASPECT_NAME.equals(aspect.getName())) { - return SchemaMetadataMapper.map(aspect, entityUrn); + return SchemaMetadataMapper.map(context, aspect, entityUrn); } return null; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java index 
45e80822b12c8..9542b4600cd2b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.aspect; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -46,6 +48,7 @@ public List> batchLoad( @Nonnull List keys, @Nonnull QueryContext context) throws Exception { try { + return keys.stream() .map( key -> { @@ -53,11 +56,13 @@ public List> batchLoad( Urn entityUrn = Urn.createFromString(key.getUrn()); Map response = - _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication()); + canView(context.getOperationContext(), entityUrn) + ? _entityClient.batchGetV2( + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName()), + context.getAuthentication()) + : Map.of(); EntityResponse entityResponse = response.get(entityUrn); @@ -69,7 +74,7 @@ public List> batchLoad( final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); return DataFetcherResult.newResult() - .data(AspectMapper.map(aspect, entityUrn)) + .data(AspectMapper.map(context, aspect, entityUrn)) .build(); } catch (Exception e) { if (e instanceof RestLiResponseException) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index 43b7b5bb102ad..4d8d26559ff4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -4,6 +4,7 @@ import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.AssertionStdAggregation; import com.linkedin.datahub.graphql.generated.AssertionStdOperator; @@ -24,10 +25,11 @@ import com.linkedin.metadata.Constants; import java.util.Collections; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class AssertionMapper { - public static Assertion map(final EntityResponse entityResponse) { + public static Assertion map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Assertion result = new Assertion(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -38,7 +40,8 @@ public static Assertion map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { - result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); + result.setInfo( + mapAssertionInfo(context, new AssertionInfo(envelopedAssertionInfo.getValue().data()))); } final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); @@ -46,7 +49,7 @@ public static Assertion map(final EntityResponse 
entityResponse) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); result.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); @@ -57,13 +60,13 @@ public static Assertion map(final EntityResponse entityResponse) { } private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertionInfo( - final AssertionInfo gmsAssertionInfo) { + @Nullable QueryContext context, final AssertionInfo gmsAssertionInfo) { final com.linkedin.datahub.graphql.generated.AssertionInfo assertionInfo = new com.linkedin.datahub.graphql.generated.AssertionInfo(); assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); if (gmsAssertionInfo.hasDatasetAssertion()) { DatasetAssertionInfo datasetAssertion = - mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + mapDatasetAssertionInfo(context, gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } assertionInfo.setDescription(gmsAssertionInfo.getDescription()); @@ -71,6 +74,7 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion } private static DatasetAssertionInfo mapDatasetAssertionInfo( + @Nullable QueryContext context, final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { DatasetAssertionInfo datasetAssertion = new DatasetAssertionInfo(); datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); @@ -103,7 +107,7 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( } if (gmsDatasetAssertion.hasNativeParameters()) { datasetAssertion.setNativeParameters( - StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + StringMapMapper.map(context, gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index ac5cce1191e5d..9b411033c1090 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java @@ -62,7 +62,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : assertionUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -72,7 +72,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(AssertionMapper.map(gmsResult)) + .data(AssertionMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index bfe2ccbe34166..eeb4b91f3a1c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -60,7 +60,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(keys.size()); for (Urn urn : tokenInfoUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -70,7 +70,7 @@ public List> batchLoad( gmsResult == null ? null : DataFetcherResult.newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) + .data(AccessTokenMetadataMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index a519a65e5cb6b..9c807bf0304ad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,6 +2,7 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -10,17 +11,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AccessTokenMetadataMapper implements ModelMapper { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); - public static AccessTokenMetadata map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static AccessTokenMetadata map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { + public AccessTokenMetadata apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse input) { final AccessTokenMetadata metadata = new AccessTokenMetadata(); metadata.setUrn(input.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index ffd5bffdd43d9..d5f976fa8f9b6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -117,6 +117,7 @@ public List> batchLoad( @Nonnull List urnStrs, 
@Nonnull QueryContext context) throws Exception { final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + final Map chartMap = _entityClient.batchGetV2( CHART_ENTITY_NAME, @@ -124,7 +125,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(chartMap.getOrDefault(urn, null)); } @@ -134,7 +135,7 @@ public List> batchLoad( gmsChart == null ? null : DataFetcherResult.newResult() - .data(ChartMapper.map(gmsChart)) + .data(ChartMapper.map(context, gmsChart)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -159,7 +160,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -172,7 +173,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete(context.getOperationContext(), "chart", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -194,7 +195,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -202,7 +203,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } private ChartUrn getChartUrn(String urnStr) { @@ -219,9 +220,9 @@ public Chart update( @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = ChartUpdateInputMapper.map(input, actor); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + ChartUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -242,7 +243,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CHART_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index 6fe8f5238f682..561c3b9bec1e0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.chart.EditableChartProperties; @@ -17,6 +18,8 @@ import com.linkedin.common.SubTypes; import 
com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AccessLevel; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.ChartEditableProperties; @@ -56,17 +59,20 @@ import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class ChartMapper implements ModelMapper { public static final ChartMapper INSTANCE = new ChartMapper(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Chart map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { + public Chart apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Chart result = new Chart(); Urn entityUrn = entityResponse.getUrn(); @@ -79,62 +85,74 @@ public Chart apply(@Nonnull final EntityResponse entityResponse) { MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); mappingHelper.mapToResult( - CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); + CHART_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapChartInfo(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); mappingHelper.mapToResult( EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> - chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + chart.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (chart, dataMap) -> chart.setStatus(StatusMapper.map(new Status(dataMap)))); + (chart, dataMap) -> chart.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> this.mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> chart.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> - chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + chart.setGlossaryTerms( + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, ChartMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, ChartMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, - (chart, dataMap) -> chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + (chart, dataMap) -> + 
chart.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> - chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); + chart.setInputFields( + InputFieldsMapper.map(context, new InputFields(dataMap), entityUrn))); mappingHelper.mapToResult( - EMBED_ASPECT_NAME, (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + EMBED_ASPECT_NAME, + (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> - chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + chart.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((chart, dataMap) -> chart.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Chart.class); + } else { + return mappingHelper.getResult(); + } } private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { @@ -153,14 +171,20 @@ private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { } private void mapChartInfo( - @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull Chart chart, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); + chart.setInfo(mapInfo(context, gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(context, gmsChartInfo, entityUrn)); } /** Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + private ChartInfo mapInfo( + @Nonnull QueryContext context, + final com.linkedin.chart.ChartInfo info, + @Nonnull Urn entityUrn) { final ChartInfo result = new ChartInfo(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -184,10 +208,10 @@ private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn if (info.hasType()) { result.setType(ChartType.valueOf(info.getType().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - 
result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } if (info.hasExternalUrl()) { result.setExternalUrl(info.getExternalUrl().toString()); @@ -202,8 +226,10 @@ private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn } /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ - private ChartProperties mapChartInfoToProperties( - final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + private static ChartProperties mapChartInfoToProperties( + @Nullable final QueryContext context, + final com.linkedin.chart.ChartInfo info, + @Nonnull Urn entityUrn) { final ChartProperties result = new ChartProperties(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -215,10 +241,10 @@ private ChartProperties mapChartInfoToProperties( if (info.hasType()) { result.setType(ChartType.valueOf(info.getType().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } if (info.hasExternalUrl()) { result.setExternalUrl(info.getExternalUrl().toString()); @@ -251,15 +277,19 @@ private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap d chart.setEditableProperties(chartEditableProperties); } - private void mapGlobalTags( - @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Chart chart, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); chart.setGlobalTags(globalTags); chart.setTags(globalTags); } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull Chart chart, @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); chart.setContainer( @@ -269,8 +299,9 @@ private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { .build()); } - private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Chart chart, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); + chart.setDomain(DomainAssociationMapper.map(context, domains, chart.getUrn())); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index f2a434b58686c..806e537c6ec26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -8,6 +8,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ChartUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,6 +19,7 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class ChartUpdateInputMapper implements InputModelMapper, Urn> { @@ -25,13 +27,17 @@ public class ChartUpdateInputMapper public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); public static Collection map( - @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final ChartUpdateInput chartUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, chartUpdateInput, actor); } @Override public Collection apply( - @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final ChartUpdateInput chartUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -41,7 +47,7 @@ public Collection apply( if (chartUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, chartUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -51,7 +57,7 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( chartUpdateInput.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } // Tags overrides global tags if provided @@ -59,7 +65,7 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( chartUpdateInput.getTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java index 4da18403f95cc..49c2d17ce0958 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java @@ -2,22 +2,28 @@ import com.linkedin.common.InputFields; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InputField; import com.linkedin.datahub.graphql.types.dataset.mappers.SchemaFieldMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InputFieldsMapper { public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); public static com.linkedin.datahub.graphql.generated.InputFields map( - @Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + @Nullable final QueryContext context, + @Nonnull final InputFields metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); } public com.linkedin.datahub.graphql.generated.InputFields apply( - @Nonnull final InputFields input, @Nonnull final Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final InputFields input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields(); result.setFields( @@ -28,7 +34,7 @@ public com.linkedin.datahub.graphql.generated.InputFields apply( if (field.hasSchemaField()) { fieldResult.setSchemaField( - SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); + SchemaFieldMapper.map(context, field.getSchemaField(), entityUrn)); } if (field.hasSchemaFieldUrn()) { fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java index 1f952bb6a2bd1..851569a6cc582 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -13,12 +15,14 @@ public class AuditStampMapper implements ModelMapper { public static final BrowsePathsV2Mapper INSTANCE = new BrowsePathsV2Mapper(); - public static BrowsePathV2 map(@Nonnull final BrowsePathsV2 metadata) { - return INSTANCE.apply(metadata); + public static BrowsePathV2 map( + @Nullable QueryContext context, @Nonnull final BrowsePathsV2 metadata) { + return INSTANCE.apply(context, metadata); } @Override - public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) { + public BrowsePathV2 apply(@Nullable QueryContext context, @Nonnull final BrowsePathsV2 input) { final BrowsePathV2 result = new BrowsePathV2(); final List path = - input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); + input.getPath().stream() + .map(p -> mapBrowsePathEntry(context, p)) + .collect(Collectors.toList()); result.setPath(path); return result; } - private BrowsePathEntry mapBrowsePathEntry(com.linkedin.common.BrowsePathEntry pathEntry) { + private BrowsePathEntry mapBrowsePathEntry( + @Nullable QueryContext context, com.linkedin.common.BrowsePathEntry pathEntry) { final BrowsePathEntry entry = new BrowsePathEntry(); entry.setName(pathEntry.getId()); if (pathEntry.hasUrn() && pathEntry.getUrn() != null) { - entry.setEntity(UrnToEntityMapper.map(pathEntry.getUrn())); + entry.setEntity(UrnToEntityMapper.map(context, pathEntry.getUrn())); } return entry; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index e3a09bc8926a3..14fd1c82d5df7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; public class ChangeAuditStampsMapper implements ModelMapper { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); - public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { - return INSTANCE.apply(input); + public static ChangeAuditStamps map( + @Nullable QueryContext context, com.linkedin.common.ChangeAuditStamps input) { + return INSTANCE.apply(context, input); } @Override - public ChangeAuditStamps apply(com.linkedin.common.ChangeAuditStamps input) { + public ChangeAuditStamps apply( + @Nullable QueryContext context, com.linkedin.common.ChangeAuditStamps input) { ChangeAuditStamps changeAuditStamps = new ChangeAuditStamps(); - changeAuditStamps.setCreated(AuditStampMapper.map(input.getCreated())); - changeAuditStamps.setLastModified(AuditStampMapper.map(input.getLastModified())); + changeAuditStamps.setCreated(AuditStampMapper.map(context, input.getCreated())); + changeAuditStamps.setLastModified(AuditStampMapper.map(context, input.getLastModified())); if (input.hasDeleted()) { - changeAuditStamps.setDeleted(AuditStampMapper.map(input.getDeleted())); + changeAuditStamps.setDeleted(AuditStampMapper.map(context, input.getDeleted())); } return changeAuditStamps; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 806e8e6aadc5b..bb35a6da98418 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,24 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.NonNull; public class CostMapper implements ModelMapper { public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); + public static Cost map( + @Nullable QueryContext context, @NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(context, cost); } @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { + public Cost apply(@Nullable QueryContext context, @Nonnull final com.linkedin.common.Cost cost) { final Cost result = new Cost(); result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); + result.setCostValue(CostValueMapper.map(context, cost.getCost())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 56c107f7ec059..c71c2274362b8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class CostValueMapper implements ModelMapper { public static final CostValueMapper INSTANCE = new CostValueMapper(); - public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); + public static CostValue map( + @Nullable QueryContext context, @NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(context, costValue); } @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { + public CostValue apply( + @Nullable QueryContext context, @NonNull final com.linkedin.common.CostValue costValue) { final CostValue result = new CostValue(); if (costValue.isCostCode()) { result.setCostCode(costValue.getCostCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index a2236f7e8586d..4345819867617 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformInstanceAspectMapper implements ModelMapper { @@ -12,12 +14,15 @@ public class DataPlatformInstanceAspectMapper new DataPlatformInstanceAspectMapper(); public static DataPlatformInstance map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { - return INSTANCE.apply(dataPlatformInstance); + return INSTANCE.apply(context, dataPlatformInstance); } @Override - public DataPlatformInstance apply(@Nonnull final com.linkedin.common.DataPlatformInstance input) { + public DataPlatformInstance apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.DataPlatformInstance input) { final DataPlatformInstance result = new DataPlatformInstance(); if (input.hasInstance()) { result.setType(EntityType.DATA_PLATFORM_INSTANCE); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java index 7a88474166915..8c3d72edfed25 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DeprecationMapper implements ModelMapper { public static final DeprecationMapper INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); + public static Deprecation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(context, deprecation); } @Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { + public Deprecation apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Deprecation input) { final Deprecation result = new Deprecation(); result.setActor(input.getActor().toString()); result.setDeprecated(input.isDeprecated()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 339c6a848d9f3..51801c43061e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ 
-1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EmbedMapper implements ModelMapper { public static final EmbedMapper INSTANCE = new EmbedMapper(); - public static Embed map(@Nonnull final com.linkedin.common.Embed metadata) { - return INSTANCE.apply(metadata); + public static Embed map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Embed metadata) { + return INSTANCE.apply(context, metadata); } @Override - public Embed apply(@Nonnull final com.linkedin.common.Embed input) { + public Embed apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Embed input) { final Embed result = new Embed(); result.setRenderUrl(input.getRenderUrl()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java index 77e53be8fac9f..cf0603d6d4973 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/GroupingCriterionInputMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.GroupingCriterion; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GroupingCriterionInputMapper implements ModelMapper { @@ -12,12 +14,13 @@ public class GroupingCriterionInputMapper public static final GroupingCriterionInputMapper INSTANCE = new GroupingCriterionInputMapper(); public static com.linkedin.metadata.query.GroupingCriterion map( - @Nonnull final GroupingCriterion groupingCriterion) { - return INSTANCE.apply(groupingCriterion); + @Nullable QueryContext context, @Nonnull final GroupingCriterion groupingCriterion) { + return INSTANCE.apply(context, groupingCriterion); } @Override - public com.linkedin.metadata.query.GroupingCriterion apply(GroupingCriterion input) { + public com.linkedin.metadata.query.GroupingCriterion apply( + @Nullable QueryContext context, GroupingCriterion input) { return new com.linkedin.metadata.query.GroupingCriterion() .setBaseEntityType( input.getBaseEntityType() != null diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 4546e0e4d8dc0..c57e7fd30da98 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -1,25 +1,31 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.InstitutionalMemory; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMapper { public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); public static InstitutionalMemory map( - @Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(memory, entityUrn); + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemory memory, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, memory, entityUrn); } public InstitutionalMemory apply( - @Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.InstitutionalMemory input, + @Nonnull final Urn entityUrn) { final InstitutionalMemory result = new InstitutionalMemory(); result.setElements( input.getElements().stream() - .map(metadata -> InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)) + .map(metadata -> InstitutionalMemoryMetadataMapper.map(context, metadata, entityUrn)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java index 49a4618507086..7c6de02ecc876 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMetadataMapper { @@ -11,12 +13,14 @@ public class InstitutionalMemoryMetadataMapper { new InstitutionalMemoryMetadataMapper(); public static InstitutionalMemoryMetadata map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + return INSTANCE.apply(context, metadata, entityUrn); } public InstitutionalMemoryMetadata apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) { final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); @@ -24,7 +28,7 @@ public InstitutionalMemoryMetadata apply( result.setDescription(input.getDescription()); // deprecated field result.setLabel(input.getDescription()); result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); - result.setCreated(AuditStampMapper.map(input.getCreateStamp())); + result.setCreated(AuditStampMapper.map(context, input.getCreateStamp())); result.setAssociatedUrn(entityUrn.toString()); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java index 87d865471708e..0219f91e60e6d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java @@ -3,10 +3,12 @@ import com.linkedin.common.AuditStamp; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryMetadataUpdateMapper implements ModelMapper { @@ -15,12 +17,13 @@ public class InstitutionalMemoryMetadataUpdateMapper new InstitutionalMemoryMetadataUpdateMapper(); public static InstitutionalMemoryMetadata map( - @Nonnull final InstitutionalMemoryMetadataUpdate input) { - return INSTANCE.apply(input); + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryMetadataUpdate input) { + return INSTANCE.apply(context, input); } @Override - public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { + public InstitutionalMemoryMetadata apply( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryMetadataUpdate input) { final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); metadata.setDescription(input.getDescription()); metadata.setUrl(new Url(input.getUrl())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java index d8b451458e72c..d8bdd354d4ad5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java @@ -2,10 +2,12 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadataArray; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class InstitutionalMemoryUpdateMapper implements ModelMapper { @@ -13,17 +15,19 @@ public class InstitutionalMemoryUpdateMapper private static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper(); - public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { - return INSTANCE.apply(input); + public static InstitutionalMemory map( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryUpdate input) { + return INSTANCE.apply(context, input); } @Override - public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { + public InstitutionalMemory apply( + @Nullable QueryContext context, @Nonnull final InstitutionalMemoryUpdate input) { final 
InstitutionalMemory institutionalMemory = new InstitutionalMemory(); institutionalMemory.setElements( new InstitutionalMemoryMetadataArray( input.getElements().stream() - .map(InstitutionalMemoryMetadataUpdateMapper::map) + .map(e -> InstitutionalMemoryMetadataUpdateMapper.map(context, e)) .collect(Collectors.toList()))); return institutionalMemory; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java index 37b625715edd5..5ed6aa609946f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java @@ -3,6 +3,7 @@ import com.linkedin.common.Operation; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OperationSourceType; import com.linkedin.datahub.graphql.generated.OperationType; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; @@ -10,6 +11,7 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OperationMapper implements TimeSeriesAspectMapper { @@ -17,13 +19,13 @@ public class OperationMapper public static final OperationMapper INSTANCE = new OperationMapper(); public static com.linkedin.datahub.graphql.generated.Operation map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.Operation apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { Operation gmsProfile = GenericRecordUtils.deserializeAspect( @@ -49,7 +51,7 @@ public com.linkedin.datahub.graphql.generated.Operation apply( result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); } if (gmsProfile.hasCustomProperties()) { - result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); + result.setCustomProperties(StringMapMapper.map(context, gmsProfile.getCustomProperties())); } if (gmsProfile.hasNumAffectedRows()) { result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java index ea15aefdad3b7..8b4f9a1f4ca50 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java @@ -4,6 +4,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -11,6 +12,7 @@ import com.linkedin.datahub.graphql.generated.OwnershipType; import 
com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -22,11 +24,16 @@ public class OwnerMapper { public static final OwnerMapper INSTANCE = new OwnerMapper(); public static Owner map( - @Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(owner, entityUrn); + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Owner owner, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, owner, entityUrn); } - public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + public Owner apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Owner owner, + @Nonnull final Urn entityUrn) { final Owner result = new Owner(); // Deprecated result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); @@ -52,7 +59,7 @@ public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull fina result.setOwner(partialOwner); } if (owner.hasSource()) { - result.setSource(OwnershipSourceMapper.map(owner.getSource())); + result.setSource(OwnershipSourceMapper.map(context, owner.getSource())); } result.setAssociatedUrn(entityUrn.toString()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java index a38c16d02f121..5cf680d88281f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java @@ -6,23 +6,25 @@ import com.linkedin.common.OwnershipType; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnerUpdate; import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.net.URISyntaxException; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OwnerUpdateMapper implements ModelMapper { private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); - public static Owner map(@Nonnull final OwnerUpdate input) { - return INSTANCE.apply(input); + public static Owner map(@Nullable QueryContext context, @Nonnull final OwnerUpdate input) { + return INSTANCE.apply(context, input); } @Override - public Owner apply(@Nonnull final OwnerUpdate input) { + public Owner apply(@Nullable QueryContext context, @Nonnull final OwnerUpdate input) { final Owner owner = new Owner(); try { if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java index 31f637a047798..a3a28717c9eb8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Ownership; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -15,17 +17,21 @@ public class OwnershipMapper { public static final OwnershipMapper INSTANCE = new OwnershipMapper(); public static Ownership map( - @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(ownership, entityUrn); + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Ownership ownership, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, ownership, entityUrn); } public Ownership apply( - @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.Ownership ownership, + @Nonnull final Urn entityUrn) { final Ownership result = new Ownership(); - result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); + result.setLastModified(AuditStampMapper.map(context, ownership.getLastModified())); result.setOwners( ownership.getOwners().stream() - .map(owner -> OwnerMapper.map(owner, entityUrn)) + .map(owner -> OwnerMapper.map(context, owner, entityUrn)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java index 75eaffb850a8b..12a38d9caa284 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnershipSource; import com.linkedin.datahub.graphql.generated.OwnershipSourceType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -16,12 +18,15 @@ public class OwnershipSourceMapper public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); public static OwnershipSource map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - return INSTANCE.apply(ownershipSource); + return INSTANCE.apply(context, ownershipSource); } @Override - public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + public OwnershipSource apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { final OwnershipSource result = new OwnershipSource(); result.setUrl(ownershipSource.getUrl()); result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java index 97afbc7ddf855..6ceccff8a9e76 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java @@ -5,26 +5,36 @@ import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.OwnershipUpdate; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class OwnershipUpdateMapper implements InputModelMapper { private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); - public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - return INSTANCE.apply(input, actor); + public static Ownership map( + @Nullable QueryContext context, + @NonNull final OwnershipUpdate input, + @NonNull final Urn actor) { + return INSTANCE.apply(context, input, actor); } @Override - public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + public Ownership apply( + @Nullable QueryContext context, + @NonNull final OwnershipUpdate input, + @NonNull final Urn actor) { final Ownership ownership = new Ownership(); ownership.setOwners( new OwnerArray( - input.getOwners().stream().map(OwnerUpdateMapper::map).collect(Collectors.toList()))); + input.getOwners().stream() + .map(o -> OwnerUpdateMapper.map(context, o)) + .collect(Collectors.toList()))); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index 16f1909af09f6..e6b75f9482f59 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchFlags; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.metadata.query.GroupingCriterionArray; import com.linkedin.metadata.query.GroupingSpec; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps GraphQL SearchFlags to Pegasus @@ -18,12 +20,13 @@ public class SearchFlagsInputMapper public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper(); public static com.linkedin.metadata.query.SearchFlags map( - @Nonnull final SearchFlags searchFlags) { - return INSTANCE.apply(searchFlags); + @Nullable QueryContext context, @Nonnull final SearchFlags searchFlags) { + return INSTANCE.apply(context, searchFlags); } @Override - public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags searchFlags) { + public com.linkedin.metadata.query.SearchFlags apply( + @Nullable QueryContext context, @Nonnull final SearchFlags searchFlags) { com.linkedin.metadata.query.SearchFlags result = new com.linkedin.metadata.query.SearchFlags(); if (searchFlags.getFulltext() != null) { result.setFulltext(searchFlags.getFulltext()); @@ -58,7 +61,7 @@ public com.linkedin.metadata.query.SearchFlags apply(@Nonnull final SearchFlags .setGroupingCriteria( new GroupingCriterionArray( searchFlags.getGroupingSpec().getGroupingCriteria().stream() - .map(GroupingCriterionInputMapper::map) + .map(c -> GroupingCriterionInputMapper.map(context, c)) .collect(Collectors.toList())))); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java index 0758daf5df2e7..eea4bcd4a28d2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SiblingProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -15,16 +19,21 @@ public class SiblingsMapper public static final SiblingsMapper INSTANCE = new SiblingsMapper(); - public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings siblings) { - return INSTANCE.apply(siblings); + public static SiblingProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Siblings siblings) { + return INSTANCE.apply(context, siblings); } @Override - public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) { + public SiblingProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Siblings siblings) { final SiblingProperties result = new SiblingProperties(); result.setIsPrimary(siblings.isPrimary()); result.setSiblings( - siblings.getSiblings().stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + siblings.getSiblings().stream() + .filter(s -> context == null || canView(context.getOperationContext(), s)) + .map(s -> UrnToEntityMapper.map(context, s)) + .collect(Collectors.toList())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java index 2d1efdffc496c..f4f829a046f2e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Status; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class StatusMapper implements ModelMapper { public static final StatusMapper INSTANCE = new StatusMapper(); - public static Status map(@Nonnull final com.linkedin.common.Status metadata) { - return INSTANCE.apply(metadata); + public static Status map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Status metadata) { + return INSTANCE.apply(context, metadata); } @Override - public Status apply(@Nonnull final com.linkedin.common.Status input) { + public Status apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.Status input) { final Status result = new Status(); result.setRemoved(input.isRemoved()); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java index 0e8d6822b7d09..4175fdb202865 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.StringMapEntry; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; import java.util.List; import java.util.Map; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -16,12 +18,14 @@ public class StringMapMapper implements ModelMapper, List map(@Nonnull final Map input) { - return INSTANCE.apply(input); + public static List map( + @Nullable QueryContext context, @Nonnull final Map input) { + return INSTANCE.apply(context, input); } @Override - public List apply(@Nonnull final Map input) { + public List apply( + @Nullable QueryContext context, @Nonnull final Map input) { List results = new ArrayList<>(); for (String key : input.keySet()) { final StringMapEntry entry = new StringMapEntry(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java index 55294e4b46822..924ee92d2f00f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.SubTypes; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SubTypesMapper implements ModelMapper { @@ -11,12 +13,13 @@ public class SubTypesMapper public static final SubTypesMapper INSTANCE = new SubTypesMapper(); public static com.linkedin.datahub.graphql.generated.SubTypes map( - @Nonnull final SubTypes metadata) { - return INSTANCE.apply(metadata); + @Nullable QueryContext context, @Nonnull final SubTypes metadata) { + return INSTANCE.apply(context, metadata); } @Override - public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { + public com.linkedin.datahub.graphql.generated.SubTypes apply( + @Nullable QueryContext context, @Nonnull final SubTypes input) { final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); result.setTypeNames(new ArrayList<>(input.getTypeNames())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 18050b1193755..00f2a0df7512c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.Container; @@ -40,16 +41,18 @@ import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UrnToEntityMapper implements ModelMapper { public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper(); - public static Entity map(@Nonnull final com.linkedin.common.urn.Urn urn) { - return INSTANCE.apply(urn); + public static Entity map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.urn.Urn 
urn) { + return INSTANCE.apply(context, urn); } @Override - public Entity apply(Urn input) { + public Entity apply(@Nullable QueryContext context, Urn input) { Entity partialEntity = null; if (input.getEntityType().equals("dataset")) { partialEntity = new Dataset(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java index 0b156f11e8834..d9eab8e1ce949 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.EnvelopedAspectMap; import java.util.function.BiConsumer; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.AllArgsConstructor; import lombok.Getter; +import org.apache.commons.lang3.function.TriConsumer; @AllArgsConstructor public class MappingHelper { @@ -18,4 +21,14 @@ public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer consumer) { + + public void mapToResult( + @Nullable final QueryContext context, + @Nonnull String aspectName, + @Nonnull TriConsumer consumer) { + if (_aspectMap.containsKey(aspectName)) { + DataMap dataMap = _aspectMap.get(aspectName).getValue().data(); + consumer.accept(context, result, dataMap); + } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 6408e2d6779fc..d1cd9528322cf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -90,7 +90,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -100,7 +100,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(ContainerMapper.map(gmsResult)) + .data(ContainerMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -133,7 +133,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -147,6 +147,6 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index 55d38f1fd2343..2c0dc142bee3d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -15,6 +15,7 @@ import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; @@ -42,7 +43,8 @@ public class ContainerMapper { @Nullable - public static Container map(final EntityResponse entityResponse) { + public static Container map( + @Nullable final QueryContext context, final EntityResponse entityResponse) { final Container result = new Container(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -58,7 +60,7 @@ public static Container map(final EntityResponse entityResponse) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); result.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); @@ -85,20 +87,22 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { result.setOwnership( - OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + OwnershipMapper.map( + context, new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { result.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + 
GlossaryTermsMapper.map( + context, new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedInstitutionalMemory = @@ -106,17 +110,20 @@ public static Container map(final EntityResponse entityResponse) { if (envelopedInstitutionalMemory != null) { result.setInstitutionalMemory( InstitutionalMemoryMapper.map( - new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + context, + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), + entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); if (statusAspect != null) { - result.setStatus(StatusMapper.map(new Status(statusAspect.getValue().data()))); + result.setStatus(StatusMapper.map(context, new Status(statusAspect.getValue().data()))); } final EnvelopedAspect envelopedSubTypes = aspects.get(Constants.SUB_TYPES_ASPECT_NAME); if (envelopedSubTypes != null) { - result.setSubTypes(SubTypesMapper.map(new SubTypes(envelopedSubTypes.getValue().data()))); + result.setSubTypes( + SubTypesMapper.map(context, new SubTypes(envelopedSubTypes.getValue().data()))); } final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); @@ -134,20 +141,20 @@ public static Container map(final EntityResponse entityResponse) { if (envelopedDomains != null) { final Domains domains = new Domains(envelopedDomains.getValue().data()); // Currently we only take the first domain if it exists. - result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); + result.setDomain(DomainAssociationMapper.map(context, domains, entityUrn.toString())); } final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { result.setDeprecation( - DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + DeprecationMapper.map(context, new Deprecation(envelopedDeprecation.getValue().data()))); } final EnvelopedAspect envelopedStructuredProps = aspects.get(STRUCTURED_PROPERTIES_ASPECT_NAME); if (envelopedStructuredProps != null) { result.setStructuredProperties( StructuredPropertiesMapper.map( - new StructuredProperties(envelopedStructuredProps.getValue().data()))); + context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); } final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 3e82c543a0098..9fa2a71251c6d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -88,7 +88,7 @@ public List> batchLoad( null, context.getAuthentication()); - final List results = new ArrayList<>(); + final List results = new ArrayList<>(urns.size()); for (Urn urn : corpGroupUrns) { results.add(corpGroupMap.getOrDefault(urn, null)); } @@ -98,7 +98,7 @@ public List> batchLoad( gmsCorpGroup == null ? 
null : DataFetcherResult.newResult() - .data(CorpGroupMapper.map(gmsCorpGroup)) + .data(CorpGroupMapper.map(context, gmsCorpGroup)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -122,7 +122,7 @@ public SearchResults search( Collections.emptyMap(), start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -136,7 +136,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "corpGroup", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -189,7 +189,7 @@ private boolean isAuthorizedToUpdate( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java index a7fde4f42a679..ed22bb06fd5c2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java @@ -2,9 +2,11 @@ import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -24,12 +26,14 @@ public class CorpGroupEditablePropertiesMapper new CorpGroupEditablePropertiesMapper(); public static CorpGroupEditableProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { - return INSTANCE.apply(corpGroupEditableInfo); + return INSTANCE.apply(context, corpGroupEditableInfo); } @Override public CorpGroupEditableProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { final CorpGroupEditableProperties result = new CorpGroupEditableProperties(); result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java index 04d0cc8ce94e6..918d7f19b99f1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; import 
com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -17,12 +19,14 @@ public class CorpGroupInfoMapper public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); public static CorpGroupInfo map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); + return INSTANCE.apply(context, corpGroupInfo); } @Override - public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + public CorpGroupInfo apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo info) { final CorpGroupInfo result = new CorpGroupInfo(); result.setEmail(info.getEmail()); result.setDescription(info.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 8fe47e9b2144a..6246cf64bbf7f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; @@ -21,6 +22,7 @@ import com.linkedin.metadata.key.CorpGroupKey; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
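The change repeated across these mapper classes is mechanical: each static map(...) entry point and each apply(...) override gains a leading @Nullable QueryContext parameter, and the INSTANCE delegation forwards it to nested mappers. A minimal, self-contained sketch of that shape follows; QueryContext, ExampleAspect, ExampleResult and ExampleMapper here are illustrative stand-ins, not classes from this patch.

/**
 * Sketch of the QueryContext-threading pattern used by the mappers in this patch.
 * All types below are stand-ins, not the real DataHub classes.
 */
public class ExampleMapper {

  /** Stand-in for com.linkedin.datahub.graphql.QueryContext. */
  public interface QueryContext {
    String getActorUrn();
  }

  /** Stand-in for a GMS aspect record. */
  public static final class ExampleAspect {
    private final String name;

    public ExampleAspect(String name) {
      this.name = name;
    }

    public String getName() {
      return name;
    }
  }

  /** Stand-in for the generated GraphQL type. */
  public static final class ExampleResult {
    private String name;

    public void setName(String name) {
      this.name = name;
    }

    public String getName() {
      return name;
    }
  }

  public static final ExampleMapper INSTANCE = new ExampleMapper();

  // Before this patch the entry point was map(aspect) delegating to apply(aspect);
  // afterwards the (possibly null) query context is accepted first and forwarded unchanged.
  public static ExampleResult map(final QueryContext context, final ExampleAspect aspect) {
    return INSTANCE.apply(context, aspect);
  }

  public ExampleResult apply(final QueryContext context, final ExampleAspect aspect) {
    final ExampleResult result = new ExampleResult();
    result.setName(aspect.getName());
    // Nested mappers would be called with the same context so that view restrictions
    // can be enforced downstream when a context is present; null is tolerated.
    return result;
  }
}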
@@ -31,12 +33,14 @@ public class CorpGroupMapper implements ModelMapper { public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); - public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static CorpGroup map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { + public CorpGroup apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final CorpGroup result = new CorpGroup(); Urn entityUrn = entityResponse.getUrn(); @@ -45,15 +49,17 @@ public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); - mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); - mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult(context, CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); mappingHelper.mapToResult( - OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); + context, CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (entity, dataMap) -> this.mapOwnership(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -74,20 +80,25 @@ private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap data corpGroup.setName(corpGroupKey.getName()); } - private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + private void mapCorpGroupInfo( + @Nullable QueryContext context, @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); - corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); - corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); + corpGroup.setProperties(CorpGroupPropertiesMapper.map(context, corpGroupInfo)); + corpGroup.setInfo(CorpGroupInfoMapper.map(context, corpGroupInfo)); } - private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + private void mapCorpGroupEditableInfo( + @Nullable QueryContext context, @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { corpGroup.setEditableProperties( - CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); + CorpGroupEditablePropertiesMapper.map(context, new CorpGroupEditableInfo(dataMap))); } private void mapOwnership( - @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); + @Nullable QueryContext context, + @Nonnull CorpGroup corpGroup, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + corpGroup.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn)); } private void mapEntityOriginType(@Nonnull CorpGroup 
corpGroup, @Nonnull DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java index 29d0482863971..3feef06b6cbb0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -16,12 +18,14 @@ public class CorpGroupPropertiesMapper public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper(); public static CorpGroupProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); + return INSTANCE.apply(context, corpGroupInfo); } @Override - public CorpGroupProperties apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + public CorpGroupProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpGroupInfo info) { final CorpGroupProperties result = new CorpGroupProperties(); result.setEmail(info.getEmail()); result.setDescription(info.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index 3dab88fcc300b..7b4f2bcf00d97 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -87,7 +87,7 @@ public List> batchLoad( null, context.getAuthentication()); - final List results = new ArrayList<>(); + final List results = new ArrayList<>(urns.size()); for (Urn urn : corpUserUrns) { results.add(corpUserMap.getOrDefault(urn, null)); } @@ -97,7 +97,7 @@ public List> batchLoad( gmsCorpUser == null ? 
null : DataFetcherResult.newResult() - .data(CorpUserMapper.map(gmsCorpUser, _featureFlags)) + .data(CorpUserMapper.map(context, gmsCorpUser, _featureFlags)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -121,7 +121,7 @@ public SearchResults search( Collections.emptyMap(), start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -135,7 +135,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "corpuser", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } public Class inputClass() { @@ -180,7 +180,7 @@ private boolean isAuthorizedToUpdate( return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java index 3ee353293393e..1ff2f069b8112 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
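Besides forwarding the context, the dashboard, data flow and data job mappers further down in this patch end apply(...) with a view check: when a context is present and the actor cannot view the entity, the fully mapped result is swapped for a restricted copy via AuthorizationUtils.restrictEntity. A rough, self-contained sketch of that guard follows; the types and the canView/restrict helpers below are stand-ins, not the real AuthorizationUtils API.

/**
 * Sketch of the post-mapping view guard added to several entity mappers in this patch.
 * QueryContext, Dashboard and the helpers below are stand-ins, not the real DataHub classes.
 */
public final class ViewGuardSketch {

  /** Stand-in for context.getOperationContext() plus the canView(...) check. */
  public interface QueryContext {
    boolean canView(String entityUrn);
  }

  /** Stand-in for the generated GraphQL Dashboard type. */
  public static final class Dashboard {
    private String urn;
    private String description;

    public void setUrn(String urn) {
      this.urn = urn;
    }

    public void setDescription(String description) {
      this.description = description;
    }

    public String getUrn() {
      return urn;
    }
  }

  /** Stand-in for AuthorizationUtils.restrictEntity: keep the urn, drop restricted fields. */
  private static Dashboard restrictEntity(Dashboard full) {
    final Dashboard restricted = new Dashboard();
    restricted.setUrn(full.getUrn());
    return restricted;
  }

  public static Dashboard apply(final QueryContext context, final String entityUrn) {
    final Dashboard result = new Dashboard();
    result.setUrn(entityUrn);
    result.setDescription("mapped from aspects");

    // The guard added by this patch: with no context (system calls) the full entity is
    // returned; with a context, entities the actor cannot view come back restricted.
    if (context != null && !context.canView(entityUrn)) {
      return restrictEntity(result);
    }
    return result;
  }

  private ViewGuardSketch() {}
}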
@@ -15,12 +17,14 @@ public class CorpUserEditableInfoMapper public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); public static CorpUserEditableProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - return INSTANCE.apply(info); + return INSTANCE.apply(context, info); } @Override public CorpUserEditableProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { final CorpUserEditableProperties result = new CorpUserEditableProperties(); result.setDisplayName(info.getDisplayName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java index 9044f4d510bcf..a728ea3695b50 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -15,12 +17,15 @@ public class CorpUserInfoMapper public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); - public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); + public static CorpUserInfo map( + @Nullable QueryContext context, + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(context, corpUserInfo); } @Override - public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + public CorpUserInfo apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo info) { final CorpUserInfo result = new CorpUserInfo(); result.setActive(info.isActive()); result.setCountryCode(info.getCountryCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index a9d92dd9d235f..4fa278983399b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserAppearanceSettings; @@ -41,17 +42,22 @@ public class CorpUserMapper { public static final CorpUserMapper INSTANCE = new CorpUserMapper(); - public static CorpUser map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse, null); + public 
static CorpUser map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse, null); } public static CorpUser map( - @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - return INSTANCE.apply(entityResponse, featureFlags); + @Nullable QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nullable final FeatureFlags featureFlags) { + return INSTANCE.apply(context, entityResponse, featureFlags); } public CorpUser apply( - @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + @Nullable QueryContext context, + @Nonnull final EntityResponse entityResponse, + @Nullable final FeatureFlags featureFlags) { final CorpUser result = new CorpUser(); Urn entityUrn = entityResponse.getUrn(); @@ -62,26 +68,27 @@ public CorpUser apply( mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); mappingHelper.mapToResult( CORP_USER_INFO_ASPECT_NAME, - (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); + (corpUser, dataMap) -> this.mapCorpUserInfo(context, corpUser, dataMap, entityUrn)); mappingHelper.mapToResult( CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) -> corpUser.setEditableProperties( - CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); + CorpUserEditableInfoMapper.map(context, new CorpUserEditableInfo(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + corpUser.setGlobalTags( + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); mappingHelper.mapToResult( CORP_USER_STATUS_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + corpUser.setStatus(CorpUserStatusMapper.map(context, new CorpUserStatus(dataMap)))); mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -151,10 +158,13 @@ private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap } private void mapCorpUserInfo( - @Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull CorpUser corpUser, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); - corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); - corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); + corpUser.setProperties(CorpUserPropertiesMapper.map(context, corpUserInfo)); + corpUser.setInfo(CorpUserInfoMapper.map(context, corpUserInfo)); CorpUserProperties corpUserProperties = corpUser.getProperties(); if (corpUserInfo.hasCustomProperties()) { corpUserProperties.setCustomProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java index 106e3de661201..738ae68cd756d 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class CorpUserPropertiesMapper @@ -12,12 +14,14 @@ public class CorpUserPropertiesMapper public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper(); public static CorpUserProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); + return INSTANCE.apply(context, corpUserInfo); } @Override - public CorpUserProperties apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + public CorpUserProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserInfo info) { final CorpUserProperties result = new CorpUserProperties(); result.setActive(info.isActive()); result.setCountryCode(info.getCountryCode()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java index dd9e465a2d4ea..eb31754a9f0f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class CorpUserStatusMapper implements ModelMapper { @@ -10,12 +12,14 @@ public class CorpUserStatusMapper public static final CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper(); public static CorpUserStatus map( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { - return INSTANCE.apply(corpUserStatus); + return INSTANCE.apply(context, corpUserStatus); } @Override - public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus status) { + public CorpUserStatus apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.identity.CorpUserStatus status) { // Warning- if the backend provides an unexpected value this will fail. 
return CorpUserStatus.valueOf(status.getStatus()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 4efcb42cf8e3c..3d53c1e474b9d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -123,7 +123,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(dashboardMap.getOrDefault(urn, null)); } @@ -133,7 +133,7 @@ public List> batchLoad( gmsDashboard == null ? null : DataFetcherResult.newResult() - .data(DashboardMapper.map(gmsDashboard)) + .data(DashboardMapper.map(context, gmsDashboard)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -158,7 +158,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -172,7 +172,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "dashboard", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -194,7 +194,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -202,7 +202,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { @@ -219,10 +219,9 @@ public Dashboard update( @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DashboardUpdateInputMapper.map(input, actor); + DashboardUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -243,7 +242,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 5826822c628df..4fa52b1136564 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -17,6 +18,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AccessLevel; import com.linkedin.datahub.graphql.generated.Chart; import com.linkedin.datahub.graphql.generated.Container; @@ -54,17 +57,20 @@ import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardMapper implements ModelMapper { public static final DashboardMapper INSTANCE = new DashboardMapper(); - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Dashboard map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { + public Dashboard apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Dashboard result = new Dashboard(); Urn entityUrn = entityResponse.getUrn(); @@ -78,64 +84,75 @@ public Dashboard apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); mappingHelper.mapToResult( DASHBOARD_INFO_ASPECT_NAME, - (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); + (entity, dataMap) -> this.mapDashboardInfo(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dashboard.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); + (dashboard, dataMap) -> + dashboard.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> dashboard.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DashboardMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DashboardMapper::mapDomains); mappingHelper.mapToResult( 
DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dashboard.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); + dashboard.setInputFields( + InputFieldsMapper.map(context, new InputFields(dataMap), entityUrn))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); mappingHelper.mapToResult( EMBED_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + dashboard.setBrowsePathV2( + BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((dashboard, dataMap) -> dashboard.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dashboard.class); + } else { + return mappingHelper.getResult(); + } } private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { @@ -154,18 +171,24 @@ private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap data } private void mapDashboardInfo( - @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { + @Nonnull QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap, + Urn entityUrn) { final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + dashboard.setInfo(mapInfo(context, gmsDashboardInfo, entityUrn)); + dashboard.setProperties(mapDashboardInfoToProperties(context, gmsDashboardInfo, entityUrn)); } /** * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link * DashboardInfo} */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + private static DashboardInfo mapInfo( + @Nullable final QueryContext context, + final com.linkedin.dashboard.DashboardInfo info, + Urn 
entityUrn) { final DashboardInfo result = new DashboardInfo(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -191,10 +214,10 @@ private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, U if (info.hasAccess()) { result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } return result; } @@ -203,8 +226,10 @@ private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, U * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link * DashboardProperties} */ - private DashboardProperties mapDashboardInfoToProperties( - final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + private static DashboardProperties mapDashboardInfoToProperties( + @Nullable final QueryContext context, + final com.linkedin.dashboard.DashboardInfo info, + Urn entityUrn) { final DashboardProperties result = new DashboardProperties(); result.setDescription(info.getDescription()); result.setName(info.getTitle()); @@ -222,10 +247,10 @@ private DashboardProperties mapDashboardInfoToProperties( if (info.hasAccess()) { result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + result.setLastModified(AuditStampMapper.map(context, info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(context, info.getLastModified().getCreated())); if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + result.setDeleted(AuditStampMapper.map(context, info.getLastModified().getDeleted())); } return result; } @@ -240,15 +265,21 @@ private void mapEditableDashboardProperties( dashboard.setEditableProperties(dashboardEditableProperties); } - private void mapGlobalTags( - @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dashboard.setGlobalTags(globalTags); dashboard.setTags(globalTags); } - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); dashboard.setContainer( @@ -258,8 +289,11 @@ private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMa .build()); } - private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap 
dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull Dashboard dashboard, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); + dashboard.setDomain(DomainAssociationMapper.map(context, domains, dashboard.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index 6212663ee87e4..d004fb70d4105 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -8,6 +8,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,19 +19,24 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardUpdateInputMapper implements InputModelMapper, Urn> { public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); public static Collection map( - @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DashboardUpdateInput dashboardUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dashboardUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DashboardUpdateInput dashboardUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); @@ -41,7 +47,7 @@ public Collection apply( if (dashboardUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -51,14 +57,14 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( dashboardUpdateInput.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } else { // Tags override global tags globalTags.setTags( new TagAssociationArray( dashboardUpdateInput.getTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index 782ec3d3a6c07..a5abb57672b42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -1,21 +1,25 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); - public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + public static DashboardUsageMetrics map( + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override - public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { + public DashboardUsageMetrics apply( + @Nullable QueryContext context, EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index fdb0bd603b27a..a3a631d450254 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -117,7 +117,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(dataFlowMap.getOrDefault(urn, null)); } @@ -127,7 +127,7 @@ public List> batchLoad( gmsDataFlow == null ? 
null : DataFetcherResult.newResult() - .data(DataFlowMapper.map(gmsDataFlow)) + .data(DataFlowMapper.map(context, gmsDataFlow)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -152,7 +152,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -166,7 +166,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "dataFlow", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -188,7 +188,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -197,7 +197,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c final StringArray result = _entityClient.getBrowsePaths( DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -206,10 +206,9 @@ public DataFlow update( throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DataFlowUpdateInputMapper.map(input, actor); + DataFlowUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -230,7 +229,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 44e2c185a93c5..9e2612f60abda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -13,6 +14,8 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataFlowEditableProperties; import com.linkedin.datahub.graphql.generated.DataFlowInfo; @@ -43,17 +46,20 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataFlowMapper implements ModelMapper { public static final DataFlowMapper INSTANCE = new 
DataFlowMapper(); - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataFlow map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { + public DataFlow apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataFlow result = new DataFlow(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATA_FLOW); @@ -72,48 +78,53 @@ public DataFlow apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataFlow.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); + (dataFlow, dataMap) -> mapGlobalTags(context, dataFlow, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> dataFlow.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> dataFlow.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DataFlowMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataFlow.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), DataFlow.class); + } else { + return mappingHelper.getResult(); + } } private void 
mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { @@ -183,17 +194,21 @@ private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataFlow.setEditableProperties(dataFlowEditableProperties); } - private void mapGlobalTags( - @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull DataFlow dataFlow, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataFlow.setGlobalTags(globalTags); dataFlow.setTags(globalTags); } - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. - dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + dataFlow.setDomain(DomainAssociationMapper.map(context, domains, dataFlow.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java index 87579a15d586e..cb9b6f66c6eab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataFlowUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,19 +19,24 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataFlowUpdateInputMapper implements InputModelMapper, Urn> { public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper(); public static Collection map( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dataFlowUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dataFlowUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -40,7 +46,7 @@ public Collection apply( if (dataFlowUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, 
dataFlowUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -50,13 +56,13 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( dataFlowUpdateInput.getGlobalTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( dataFlowUpdateInput.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index 5127bee3f8a8c..0f69724e1e430 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -112,6 +112,7 @@ public List> batchLoad( final List urnStrs, @Nonnull final QueryContext context) throws Exception { final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { + final Map dataJobMap = _entityClient.batchGetV2( Constants.DATA_JOB_ENTITY_NAME, @@ -119,7 +120,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(dataJobMap.getOrDefault(urn, null)); } @@ -129,7 +130,7 @@ public List> batchLoad( gmsDataJob == null ? null : DataFetcherResult.newResult() - .data(DataJobMapper.map(gmsDataJob)) + .data(DataJobMapper.map(context, gmsDataJob)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -154,7 +155,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -167,7 +168,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete(context.getOperationContext(), "dataJob", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -189,7 +190,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -197,7 +198,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -205,10 +206,9 @@ public DataJob update( @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection proposals = - DataJobUpdateInputMapper.map(input, actor); + DataJobUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> 
proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -229,7 +229,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index c4b3ce95d983d..d7da875bc2a29 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.google.common.collect.ImmutableList; @@ -15,6 +16,8 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataFlow; import com.linkedin.datahub.graphql.generated.DataJob; import com.linkedin.datahub.graphql.generated.DataJobEditableProperties; @@ -47,17 +50,20 @@ import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataJobMapper implements ModelMapper { public static final DataJobMapper INSTANCE = new DataJobMapper(); - public static DataJob map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataJob map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataJob apply(@Nonnull final EntityResponse entityResponse) { + public DataJob apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataJob result = new DataJob(); Urn entityUrn = entityResponse.getUrn(); @@ -75,8 +81,10 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { DataMap data = aspect.getValue().data(); if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow( - new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + if (context == null || canView(context.getOperationContext(), gmsKey.getFlow())) { + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + } result.setJobId(gmsKey.getJobId()); } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = @@ -96,42 +104,48 @@ public DataJob apply(@Nonnull final EntityResponse entityResponse) { editableDataJobProperties.getDescription()); result.setEditableProperties(dataJobEditableProperties); } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { - result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); + result.setOwnership(OwnershipMapper.map(context, new Ownership(data), entityUrn)); } else if (STATUS_ASPECT_NAME.equals(name)) { - 
result.setStatus(StatusMapper.map(new Status(data))); + result.setStatus(StatusMapper.map(context, new Status(data))); } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { result.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(data), entityUrn)); } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { result.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); + GlossaryTermsMapper.map(context, new GlossaryTerms(data), entityUrn)); } else if (DOMAINS_ASPECT_NAME.equals(name)) { final Domains domains = new Domains(data); // Currently we only take the first domain if it exists. - result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); + result.setDomain( + DomainAssociationMapper.map(context, domains, entityUrn.toString())); } else if (DEPRECATION_ASPECT_NAME.equals(name)) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); + result.setDeprecation(DeprecationMapper.map(context, new Deprecation(data))); } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { result.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(data))); } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { - result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); + result.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(data))); } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { - result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); + result.setSubTypes(SubTypesMapper.map(context, new SubTypes(data))); } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(name)) { result.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(data))); + StructuredPropertiesMapper.map(context, new StructuredProperties(data))); } else if (FORMS_ASPECT_NAME.equals(name)) { result.setForms(FormsMapper.map(new Forms(data), entityUrn.toString())); } }); - return result; + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, DataJob.class); + } else { + return result; + } } /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b0f299e00b4ba..a1d0123d3a521 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataJobUpdateInput; import 
com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,19 +19,24 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataJobUpdateInputMapper implements InputModelMapper, Urn> { public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); public static Collection map( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(dataJobUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DataJobUpdateInput dataJobUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, dataJobUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DataJobUpdateInput dataJobUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); @@ -41,7 +47,7 @@ public Collection apply( if (dataJobUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -51,13 +57,13 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( dataJobUpdateInput.getGlobalTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( dataJobUpdateInput.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java index 567d275dbee0a..7e939719a3ec4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java @@ -58,7 +58,7 @@ public List> batchLoad( gmsPlatform == null ? 
null : DataFetcherResult.newResult() - .data(DataPlatformMapper.map(gmsPlatform)) + .data(DataPlatformMapper.map(context, gmsPlatform)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java index c2dc3bfabd07c..a7c765f5dcbf6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformInfo; import com.linkedin.datahub.graphql.generated.PlatformType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; @Deprecated public class DataPlatformInfoMapper @@ -12,12 +14,15 @@ public class DataPlatformInfoMapper public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); public static DataPlatformInfo map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + return INSTANCE.apply(context, platform); } @Override - public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + public DataPlatformInfo apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { final DataPlatformInfo result = new DataPlatformInfo(); result.setType(PlatformType.valueOf(input.getType().toString())); result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java index f7078f9f37d7c..df3fc7fb6434e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -14,17 +15,20 @@ import com.linkedin.metadata.key.DataPlatformKey; import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformMapper implements ModelMapper { public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); - public static DataPlatform map(@Nonnull final EntityResponse platform) { - return INSTANCE.apply(platform); + public static DataPlatform map( + @Nullable QueryContext context, @Nonnull final EntityResponse platform) { + return INSTANCE.apply(context, platform); } @Override - public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { + public DataPlatform apply( 
+ @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataPlatform result = new DataPlatform(); final DataPlatformKey dataPlatformKey = (DataPlatformKey) @@ -48,7 +52,7 @@ public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) -> dataPlatform.setProperties( - DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); + DataPlatformPropertiesMapper.map(context, new DataPlatformInfo(dataMap)))); return mappingHelper.getResult(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java index ad6de5505bed6..0043ad65ee5db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatformProperties; import com.linkedin.datahub.graphql.generated.PlatformType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformPropertiesMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class DataPlatformPropertiesMapper public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper(); public static DataPlatformProperties map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + return INSTANCE.apply(context, platform); } @Override public DataPlatformProperties apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { final DataPlatformProperties result = new DataPlatformProperties(); result.setType(PlatformType.valueOf(input.getType().toString())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index c45cec34e5e79..36399ddf784d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -90,7 +90,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataPlatformInstanceMapper.map(gmsResult)) + .data(DataPlatformInstanceMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); @@ -126,6 +126,6 @@ public AutoCompleteResults autoComplete( query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java index 1a2bd0488c4bd..ed9bf0c82d869 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; @@ -23,16 +24,19 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataPlatformInstanceKey; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataPlatformInstanceMapper { public static final DataPlatformInstanceMapper INSTANCE = new DataPlatformInstanceMapper(); - public static DataPlatformInstance map(final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataPlatformInstance map( + @Nullable QueryContext context, final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } - public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) { + public DataPlatformInstance apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataPlatformInstance result = new DataPlatformInstance(); final Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityUrn.toString()); @@ -50,24 +54,26 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) Constants.OWNERSHIP_ASPECT_NAME, (dataPlatformInstance, dataMap) -> dataPlatformInstance.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( Constants.GLOBAL_TAGS_ASPECT_NAME, (dataPlatformInstance, dataMap) -> - this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)); + this.mapGlobalTags(context, dataPlatformInstance, dataMap, entityUrn)); mappingHelper.mapToResult( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataPlatformInstance, dataMap) -> dataPlatformInstance.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( Constants.STATUS_ASPECT_NAME, (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))); + dataPlatformInstance.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( Constants.DEPRECATION_ASPECT_NAME, (dataPlatformInstance, dataMap) -> - 
dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataPlatformInstance.setDeprecation( + DeprecationMapper.map(context, new Deprecation(dataMap)))); return mappingHelper.getResult(); } @@ -103,12 +109,13 @@ private void mapDataPlatformInstanceProperties( dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags( + private static void mapGlobalTags( + @Nullable QueryContext context, @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index 48a0cb984862d..7a4d342281fe5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -4,6 +4,7 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; @@ -13,6 +14,7 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -23,12 +25,14 @@ public class DataProcessInstanceMapper implements ModelMapper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult( - DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + context, DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); return mappingHelper.getResult(); } private void mapDataProcessProperties( - @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + @Nonnull QueryContext context, @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); dpi.setName(dataProcessInstanceProperties.getName()); if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); + dpi.setCreated(AuditStampMapper.map(context, dataProcessInstanceProperties.getCreated())); } if (dataProcessInstanceProperties.hasExternalUrl()) { dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index fd60711e8c569..b7dcb74b10fb2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.dataprocess.DataProcessInstanceRunEvent; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataProcessInstanceRunEventMapper implements TimeSeriesAspectMapper { @@ -13,13 +15,13 @@ public class DataProcessInstanceRunEventMapper new DataProcessInstanceRunEventMapper(); public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { DataProcessInstanceRunEvent runEvent = GenericRecordUtils.deserializeAspect( @@ -38,7 +40,7 @@ public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( runEvent.getStatus().toString())); } if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); + result.setResult(DataProcessInstanceRunResultMapper.map(context, runEvent.getResult())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 422bea73925a8..7026856503a0b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResultType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataProcessInstanceRunResultMapper implements ModelMapper< @@ -14,13 +16,13 @@ public class DataProcessInstanceRunResultMapper new DataProcessInstanceRunResultMapper(); public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( - @Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); + @Nullable QueryContext context, @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(context, input); } @Override public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( - @Nonnull final DataProcessInstanceRunResult input) { + @Nullable QueryContext context, @Nonnull final DataProcessInstanceRunResult input) { final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index a63c4bbbbf1d2..6689ddf56afe4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -85,7 +85,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : dataProductUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -95,7 +95,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataProductMapper.map(gmsResult)) + .data(DataProductMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -114,7 +114,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), DATA_PRODUCT_ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index b63bfde980057..08637dbfd01ed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; @@ -17,6 +18,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataProduct; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; @@ -35,17 +38,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataProductMapper implements ModelMapper { public static final DataProductMapper INSTANCE = new DataProductMapper(); - public static DataProduct map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataProduct map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataProduct apply(@Nonnull final EntityResponse entityResponse) { + public DataProduct apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataProduct result = new DataProduct(); Urn entityUrn = entityResponse.getUrn(); @@ -60,37 +66,43 @@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + dataProduct.setTags(GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> dataProduct.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult( DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> dataProduct.setDomain( - DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); + 
DomainAssociationMapper.map(context, new Domains(dataMap), dataProduct.getUrn()))); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataProduct.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> dataProduct.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return result; + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, DataProduct.class); + } else { + return result; + } } private void mapDataProductProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 0ae41eef6b1b1..30d03d4b5c356 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -94,10 +94,10 @@ public class DatasetType private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; - private final EntityClient _entityClient; + private final EntityClient entityClient; public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; + this.entityClient = entityClient; } @Override @@ -132,13 +132,13 @@ public List> batchLoad( final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final Map datasetMap = - _entityClient.batchGetV2( + entityClient.batchGetV2( Constants.DATASET_ENTITY_NAME, new HashSet<>(urns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urnStrs.size()); for (Urn urn : urns) { gmsResults.add(datasetMap.getOrDefault(urn, null)); } @@ -148,7 +148,7 @@ public List> batchLoad( gmsDataset == null ? 
null : DataFetcherResult.newResult() - .data(DatasetMapper.map(gmsDataset)) + .data(DatasetMapper.map(context, gmsDataset)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -166,14 +166,14 @@ public SearchResults search( throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); final SearchResult searchResult = - _entityClient.search( + entityClient.search( context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(true)), ENTITY_NAME, query, facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -185,9 +185,9 @@ public AutoCompleteResults autoComplete( @Nonnull final QueryContext context) throws Exception { final AutoCompleteResult result = - _entityClient.autoComplete( + entityClient.autoComplete( context.getOperationContext(), ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -202,28 +202,28 @@ public BrowseResults browse( final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; final BrowseResult result = - _entityClient.browse( + entityClient.browse( context.getOperationContext().withSearchFlags(flags -> flags.setFulltext(false)), "dataset", pathStr, facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { final StringArray result = - _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(context, result); } @Override public List batchUpdate( @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Urn actor = Urn.createFromString(context.getActorUrn()); final Collection proposals = Arrays.stream(input) @@ -231,7 +231,7 @@ public List batchUpdate( updateInput -> { if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { Collection datasetProposals = - DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); + DatasetUpdateInputMapper.map(context, updateInput.getUpdate(), actor); datasetProposals.forEach( proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); return datasetProposals; @@ -246,7 +246,7 @@ public List batchUpdate( Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } @@ -261,14 +261,13 @@ public Dataset update( @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); final Collection 
proposals = - DatasetUpdateInputMapper.map(input, actor); + DatasetUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } @@ -285,7 +284,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index df019cc5df8fe..a328e31ba7608 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -94,7 +94,7 @@ public List> batchLoad( gmsDataset == null ? null : DataFetcherResult.newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) + .data(VersionedDatasetMapper.map(context, gmsDataset)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 5fe7815ea2f8d..e63335beef9c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.assertion.AssertionRunEvent; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AssertionResult; import com.linkedin.datahub.graphql.generated.AssertionResultType; import com.linkedin.datahub.graphql.generated.AssertionRunStatus; @@ -12,6 +13,7 @@ import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AssertionRunEventMapper implements TimeSeriesAspectMapper { @@ -19,13 +21,13 @@ public class AssertionRunEventMapper public static final AssertionRunEventMapper INSTANCE = new AssertionRunEventMapper(); public static com.linkedin.datahub.graphql.generated.AssertionRunEvent map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = GenericRecordUtils.deserializeAspect( @@ -43,17 +45,17 @@ public 
com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setStatus( AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { - assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); + assertionRunEvent.setBatchSpec(mapBatchSpec(context, gmsAssertionRunEvent.getBatchSpec())); } if (gmsAssertionRunEvent.hasPartitionSpec()) { assertionRunEvent.setPartitionSpec(mapPartitionSpec(gmsAssertionRunEvent.getPartitionSpec())); } if (gmsAssertionRunEvent.hasResult()) { - assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); + assertionRunEvent.setResult(mapAssertionResult(context, gmsAssertionRunEvent.getResult())); } if (gmsAssertionRunEvent.hasRuntimeContext()) { assertionRunEvent.setRuntimeContext( - StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + StringMapMapper.map(context, gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; @@ -66,7 +68,8 @@ private PartitionSpec mapPartitionSpec(com.linkedin.timeseries.PartitionSpec gms return partitionSpec; } - private AssertionResult mapAssertionResult(com.linkedin.assertion.AssertionResult gmsResult) { + private AssertionResult mapAssertionResult( + @Nullable QueryContext context, com.linkedin.assertion.AssertionResult gmsResult) { AssertionResult datasetAssertionResult = new AssertionResult(); datasetAssertionResult.setRowCount(gmsResult.getRowCount()); datasetAssertionResult.setActualAggValue(gmsResult.getActualAggValue()); @@ -79,18 +82,20 @@ private AssertionResult mapAssertionResult(com.linkedin.assertion.AssertionResul } if (gmsResult.hasNativeResults()) { - datasetAssertionResult.setNativeResults(StringMapMapper.map(gmsResult.getNativeResults())); + datasetAssertionResult.setNativeResults( + StringMapMapper.map(context, gmsResult.getNativeResults())); } return datasetAssertionResult; } - private BatchSpec mapBatchSpec(com.linkedin.assertion.BatchSpec gmsBatchSpec) { + private BatchSpec mapBatchSpec( + @Nullable QueryContext context, com.linkedin.assertion.BatchSpec gmsBatchSpec) { BatchSpec batchSpec = new BatchSpec(); batchSpec.setNativeBatchId(gmsBatchSpec.getNativeBatchId()); batchSpec.setLimit(gmsBatchSpec.getLimit()); batchSpec.setQuery(gmsBatchSpec.getQuery()); - batchSpec.setCustomProperties(StringMapMapper.map(gmsBatchSpec.getCustomProperties())); + batchSpec.setCustomProperties(StringMapMapper.map(context, gmsBatchSpec.getCustomProperties())); return batchSpec; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1644e0243a181..a4b076f8c8bf2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DatasetDeprecationMapper implements ModelMapper { @@ -10,12 +12,15 @@ public class DatasetDeprecationMapper public static final DatasetDeprecationMapper 
INSTANCE = new DatasetDeprecationMapper(); public static Deprecation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); + return INSTANCE.apply(context, deprecation); } @Override - public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + public Deprecation apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.dataset.DatasetDeprecation input) { final Deprecation result = new Deprecation(); result.setActor(input.getActor().toString()); result.setDeprecated(input.isDeprecated()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java index 7e5372268170b..de715f28ef783 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetFilterMapper.java @@ -1,21 +1,25 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DatasetFilter; import com.linkedin.datahub.graphql.generated.DatasetFilterType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DatasetFilterMapper implements ModelMapper { public static final DatasetFilterMapper INSTANCE = new DatasetFilterMapper(); - public static DatasetFilter map(@Nonnull final com.linkedin.dataset.DatasetFilter metadata) { - return INSTANCE.apply(metadata); + public static DatasetFilter map( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetFilter metadata) { + return INSTANCE.apply(context, metadata); } @Override - public DatasetFilter apply(@Nonnull final com.linkedin.dataset.DatasetFilter input) { + public DatasetFilter apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.dataset.DatasetFilter input) { final DatasetFilter result = new DatasetFilter(); result.setType(DatasetFilterType.valueOf(input.getType().name())); result.setSql(input.getSql()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 326871c95d205..89d5aa8621bf0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Access; @@ -18,6 +19,8 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; @@ -58,6 +61,7 @@ import 
com.linkedin.schema.SchemaMetadata; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** @@ -70,11 +74,17 @@ public class DatasetMapper implements ModelMapper { public static final DatasetMapper INSTANCE = new DatasetMapper(); - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); + public static Dataset map( + @Nullable final QueryContext context, @Nonnull final EntityResponse dataset) { + return INSTANCE.apply(context, dataset); } public Dataset apply(@Nonnull final EntityResponse entityResponse) { + return apply(null, entityResponse); + } + + public Dataset apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { Dataset result = new Dataset(); Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityResponse.getUrn().toString()); @@ -92,11 +102,12 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + dataset.setDeprecation( + DatasetDeprecationMapper.map(context, new DatasetDeprecation(dataMap)))); mappingHelper.mapToResult( SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); + dataset.setSchema(SchemaMapper.map(context, new SchemaMetadata(dataMap), entityUrn))); mappingHelper.mapToResult( EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); @@ -104,41 +115,44 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> dataset.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataset.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> dataset.setEditableSchemaMetadata( - EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + EditableSchemaMetadataMapper.map( + context, new EditableSchemaMetadata(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> dataset.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, CONTAINER_ASPECT_NAME, DatasetMapper::mapContainers); + 
mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, DatasetMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataset.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( SIBLINGS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + (dataset, dataMap) -> + dataset.setSiblings(SiblingsMapper.map(context, new Siblings(dataMap)))); mappingHelper.mapToResult( UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> @@ -146,11 +160,11 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); mappingHelper.mapToResult( EMBED_ASPECT_NAME, - (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(context, new Embed(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> - dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> @@ -159,15 +173,21 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((dataset, dataMap) -> dataset.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); mappingHelper.mapToResult( SUB_TYPES_ASPECT_NAME, - (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); + (dashboard, dataMap) -> + dashboard.setSubTypes(SubTypesMapper.map(context, new SubTypes(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Dataset.class); + } else { + return mappingHelper.getResult(); + } } private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { @@ -242,15 +262,19 @@ private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMa dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags( - @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull Dataset dataset, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataset.setGlobalTags(globalTags); dataset.setTags(globalTags); } - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, @Nonnull Dataset dataset, 
@Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); dataset.setContainer( @@ -260,8 +284,9 @@ private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { .build()); } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Dataset dataset, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + dataset.setDomain(DomainAssociationMapper.map(context, domains, dataset.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index 25639e431fac1..e966993871d06 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.dataset.DatasetFieldProfile; import com.linkedin.dataset.DatasetProfile; @@ -7,6 +8,7 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DatasetProfileMapper implements TimeSeriesAspectMapper { @@ -14,13 +16,13 @@ public class DatasetProfileMapper public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); public static com.linkedin.datahub.graphql.generated.DatasetProfile map( - @Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(context, envelopedAspect); } @Override public com.linkedin.datahub.graphql.generated.DatasetProfile apply( - @Nonnull final EnvelopedAspect envelopedAspect) { + @Nullable QueryContext context, @Nonnull final EnvelopedAspect envelopedAspect) { DatasetProfile gmsProfile = GenericRecordUtils.deserializeAspect( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 0b05d420030b5..122298bcab654 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -7,6 +7,7 @@ import com.linkedin.common.TagAssociationArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; @@ -23,6 +24,7 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; public class DatasetUpdateInputMapper implements InputModelMapper, Urn> { @@ -30,13 +32,17 @@ public class DatasetUpdateInputMapper public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection map( - @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(datasetUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final DatasetUpdateInput datasetUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, datasetUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final DatasetUpdateInput datasetUpdateInput, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(6); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATASET_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -46,7 +52,7 @@ public Collection apply( if (datasetUpdateInput.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), + OwnershipUpdateMapper.map(context, datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -65,7 +71,8 @@ public Collection apply( if (datasetUpdateInput.getInstitutionalMemory() != null) { proposals.add( updateMappingHelper.aspectToProposal( - InstitutionalMemoryUpdateMapper.map(datasetUpdateInput.getInstitutionalMemory()), + InstitutionalMemoryUpdateMapper.map( + context, datasetUpdateInput.getInstitutionalMemory()), INSTITUTIONAL_MEMORY_ASPECT_NAME)); } @@ -75,14 +82,14 @@ public Collection apply( globalTags.setTags( new TagAssociationArray( datasetUpdateInput.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field globalTags.setTags( new TagAssociationArray( datasetUpdateInput.getTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); @@ -93,7 +100,7 @@ public Collection apply( editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() - .map(element -> mapSchemaFieldInfo(element)) + .map(element -> mapSchemaFieldInfo(context, element)) .collect(Collectors.toList()))); editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); @@ -117,6 +124,7 @@ public Collection apply( } private EditableSchemaFieldInfo mapSchemaFieldInfo( + @Nullable QueryContext context, final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); @@ -130,7 +138,7 @@ private EditableSchemaFieldInfo mapSchemaFieldInfo( globalTags.setTags( new TagAssociationArray( schemaFieldInfo.getGlobalTags().getTags().stream() - .map(element -> TagAssociationUpdateMapper.map(element)) + .map(element -> TagAssociationUpdateMapper.map(context, element)) .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } 
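The recurring shape of the mapper changes in this patch is a new leading @Nullable QueryContext parameter on each static map(...) entry point, forwarded to the singleton's apply(...) and from there into every nested per-element mapper call (for example TagAssociationUpdateMapper.map(context, element) above). A minimal, self-contained sketch of that calling convention follows; ExampleMapper, RequestContext, ExampleInput, and ExampleView are illustrative stand-ins rather than DataHub classes, and the javax.annotation (JSR-305) annotations are assumed to be on the classpath as they are in the surrounding code.

import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

public class ExampleMapper {

  /** Illustrative stand-ins for QueryContext and the generated GraphQL types. */
  public static class RequestContext {}

  public static class ExampleInput {
    public List<String> tags = new ArrayList<>();
  }

  public static class ExampleView {
    public List<String> tags = new ArrayList<>();
  }

  public static final ExampleMapper INSTANCE = new ExampleMapper();

  // Static entry point now takes the (possibly null) request context first ...
  public static ExampleView map(@Nullable RequestContext context, @Nonnull ExampleInput input) {
    return INSTANCE.apply(context, input);
  }

  // ... and apply() threads it through to every nested mapping step.
  public ExampleView apply(@Nullable RequestContext context, @Nonnull ExampleInput input) {
    ExampleView view = new ExampleView();
    view.tags =
        input.tags.stream()
            .map(tag -> mapTag(context, tag)) // analogous to the per-element lambdas above
            .collect(Collectors.toList());
    return view;
  }

  private String mapTag(@Nullable RequestContext context, String tag) {
    // The real per-element mappers use the context for authorization-aware decisions.
    return tag;
  }
}

Keeping the parameter @Nullable lets callers that have no request context keep using the same entry points, which is why the null check precedes every view-authorization gate added in this patch.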
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index f54adbe8ba26c..15ba9d025ec85 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -1,22 +1,28 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EditableSchemaFieldInfoMapper { public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final EditableSchemaFieldInfo fieldInfo, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(fieldInfo, entityUrn); + @Nullable final QueryContext context, + @Nonnull final EditableSchemaFieldInfo fieldInfo, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, fieldInfo, entityUrn); } public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, @Nonnull final Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final EditableSchemaFieldInfo input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); if (input.hasDescription()) { @@ -26,11 +32,12 @@ public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( result.setFieldPath((input.getFieldPath())); } if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setGlobalTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); } if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, input.getGlossaryTerms(), entityUrn)); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 3cf012a523d54..1c1e77f66a1ec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,26 +1,32 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; import 
javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EditableSchemaMetadataMapper { public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + @Nullable QueryContext context, + @Nonnull final EditableSchemaMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); } public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( - @Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final EditableSchemaMetadata input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); result.setEditableSchemaFieldInfo( input.getEditableSchemaFieldInfo().stream() - .map(schemaField -> EditableSchemaFieldInfoMapper.map(schemaField, entityUrn)) + .map(schemaField -> EditableSchemaFieldInfoMapper.map(context, schemaField, entityUrn)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b99b243da5b94..56ec8de758857 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -1,43 +1,48 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { private ForeignKeyConstraintMapper() {} - public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { + public static ForeignKeyConstraint map( + @Nullable QueryContext context, com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); result.setName(constraint.getName()); if (constraint.hasForeignDataset()) { - result.setForeignDataset((Dataset) UrnToEntityMapper.map(constraint.getForeignDataset())); + result.setForeignDataset( + (Dataset) UrnToEntityMapper.map(context, constraint.getForeignDataset())); } if (constraint.hasSourceFields()) { result.setSourceFields( constraint.getSourceFields().stream() - .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .map(schemaFieldUrn -> mapSchemaFieldEntity(context, schemaFieldUrn)) .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( constraint.getForeignFields().stream() - .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .map(schemaFieldUrn -> mapSchemaFieldEntity(context, schemaFieldUrn)) 
.collect(Collectors.toList())); } return result; } - private static SchemaFieldEntity mapSchemaFieldEntity(Urn schemaFieldUrn) { + private static SchemaFieldEntity mapSchemaFieldEntity( + @Nullable QueryContext context, Urn schemaFieldUrn) { SchemaFieldEntity result = new SchemaFieldEntity(); try { Urn resourceUrn = Urn.createFromString(schemaFieldUrn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(resourceUrn)); + result.setParent(UrnToEntityMapper.map(context, resourceUrn)); } catch (Exception e) { throw new RuntimeException("Error converting schemaField parent urn string to Urn", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index dd345bebf657f..28096f30d1817 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.KeyValueSchema; import com.linkedin.datahub.graphql.generated.PlatformSchema; import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.schema.SchemaMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class PlatformSchemaMapper implements ModelMapper { public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); + public static PlatformSchema map( + @Nullable QueryContext context, @Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(context, metadata); } @Override - public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { + public PlatformSchema apply( + @Nullable QueryContext context, @Nonnull final SchemaMetadata.PlatformSchema input) { Object result; if (input.isSchemaless()) { return null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index e0a74d351125f..a2cc9d5a66edd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; @@ -9,18 +10,23 @@ import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.metadata.utils.SchemaFieldUtils; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SchemaFieldMapper { public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); public static SchemaField map( - @Nonnull final 
com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); + @Nullable final QueryContext context, + @Nonnull final com.linkedin.schema.SchemaField metadata, + @Nonnull Urn entityUrn) { + return INSTANCE.apply(context, metadata, entityUrn); } public SchemaField apply( - @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final com.linkedin.schema.SchemaField input, + @Nonnull Urn entityUrn) { final SchemaField result = new SchemaField(); result.setDescription(input.getDescription()); result.setFieldPath(input.getFieldPath()); @@ -31,11 +37,12 @@ public SchemaField apply( result.setType(mapSchemaFieldDataType(input.getType())); result.setLabel(input.getLabel()); if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setGlobalTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(context, input.getGlobalTags(), entityUrn)); } if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(context, input.getGlossaryTerms(), entityUrn)); } result.setIsPartOfKey(input.isIsPartOfKey()); result.setIsPartitioningKey(input.isIsPartitioningKey()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index d0424ba89eca1..fd089184fb1c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; @@ -12,18 +13,23 @@ public class SchemaMapper { public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); + public static Schema map( + @Nullable QueryContext context, + @Nonnull final SchemaMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, metadata, null, entityUrn); } public static Schema map( + @Nullable QueryContext context, @Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); + return INSTANCE.apply(context, metadata, systemMetadata, entityUrn); } public Schema apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { @@ -42,13 +48,13 @@ public Schema apply( result.setPrimaryKeys(input.getPrimaryKeys()); result.setFields( input.getFields().stream() - .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .map(field -> SchemaFieldMapper.map(context, field, entityUrn)) .collect(Collectors.toList())); - 
result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setPlatformSchema(PlatformSchemaMapper.map(context, input.getPlatformSchema())); if (input.getForeignKeys() != null) { result.setForeignKeys( input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) + .map(fk -> ForeignKeyConstraintMapper.map(context, fk)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index e550280a6c2db..327cae3bae11f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -1,28 +1,37 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.schema.SchemaMetadata; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SchemaMetadataMapper { public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(aspect, entityUrn); + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, aspect, entityUrn); } public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + @Nullable QueryContext context, + @Nonnull final EnvelopedAspect aspect, + @Nonnull final Urn entityUrn) { final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); - return apply(input, entityUrn, aspect.getVersion()); + return apply(context, input, entityUrn, aspect.getVersion()); } public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final SchemaMetadata input, final Urn entityUrn, final long version) { + @Nullable QueryContext context, + @Nonnull final SchemaMetadata input, + final Urn entityUrn, + final long version) { final com.linkedin.datahub.graphql.generated.SchemaMetadata result = new com.linkedin.datahub.graphql.generated.SchemaMetadata(); @@ -37,14 +46,16 @@ public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( result.setPrimaryKeys(input.getPrimaryKeys()); result.setFields( input.getFields().stream() - .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .map(field -> SchemaFieldMapper.map(context, field, entityUrn)) .collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setPlatformSchema(PlatformSchemaMapper.map(context, input.getPlatformSchema())); result.setAspectVersion(version); if (input.hasForeignKeys()) { result.setForeignKeys( input.getForeignKeys().stream() - .map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map(foreignKeyConstraint)) + .map( + foreignKeyConstraint -> + ForeignKeyConstraintMapper.map(context, foreignKeyConstraint)) .collect(Collectors.toList())); } return result; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 727e8629f74b2..817c7c983ecc5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Deprecation; @@ -10,6 +11,8 @@ import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DatasetEditableProperties; @@ -38,6 +41,7 @@ import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.schema.SchemaMetadata; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; /** @@ -50,12 +54,14 @@ public class VersionedDatasetMapper implements ModelMapper - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + dataset.setDeprecation( + DatasetDeprecationMapper.map(context, new DatasetDeprecation(dataMap)))); mappingHelper.mapToResult( SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> dataset.setSchema( - SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + SchemaMapper.map( + context, new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); mappingHelper.mapToResult( EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); @@ -85,35 +93,42 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> dataset.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + dataset.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + (dataset, dataMap) -> mapGlobalTags(context, dataset, dataMap, entityUrn)); mappingHelper.mapToResult( EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> dataset.setEditableSchemaMetadata( - EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + EditableSchemaMetadataMapper.map( + context, new EditableSchemaMetadata(dataMap), entityUrn))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (dataset, 
dataMap) -> dataset.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, CONTAINER_ASPECT_NAME, VersionedDatasetMapper::mapContainers); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, VersionedDatasetMapper::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + dataset.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), VersionedDataset.class); + } else { + return mappingHelper.getResult(); + } } private SystemMetadata getSystemMetadata(EnvelopedAspectMap aspectMap, String aspectName) { @@ -174,14 +189,20 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags( - @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); dataset.setTags(globalTags); } - private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + private static void mapContainers( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap) { final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); dataset.setContainer( @@ -191,9 +212,12 @@ private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap d .build()); } - private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull VersionedDataset dataset, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + dataset.setDomain(DomainAssociationMapper.map(context, domains, dataset.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java index 612644ae2dbb2..b2e3b2c7447d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeEntityMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.DATA_TYPE_INFO_ASPECT_NAME; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataTypeEntity; import com.linkedin.datahub.graphql.generated.DataTypeInfo; import com.linkedin.datahub.graphql.generated.EntityType; @@ -11,17 +12,20 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataTypeEntityMapper implements ModelMapper { public static final DataTypeEntityMapper INSTANCE = new DataTypeEntityMapper(); - public static DataTypeEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataTypeEntity apply(@Nonnull final EntityResponse entityResponse) { + public DataTypeEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataTypeEntity result = new DataTypeEntity(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATA_TYPE); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java index 5ea1680546ce6..52fd21d558321 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datatype/DataTypeType.java @@ -68,7 +68,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataTypeEntityMapper.map(gmsResult)) + .data(DataTypeEntityMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index 51ef254f52225..37b2018a2d450 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -1,9 +1,13 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainAssociation; import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -15,13 +19,19 @@ public class DomainAssociationMapper { public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - return INSTANCE.apply(domains, entityUrn); + @Nullable final QueryContext context, + @Nonnull final com.linkedin.domain.Domains domains, + @Nonnull final String entityUrn) { + return INSTANCE.apply(context, domains, entityUrn); } public DomainAssociation apply( - @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { + @Nullable final QueryContext context, + @Nonnull final com.linkedin.domain.Domains domains, + @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0 + && (context == null + || canView(context.getOperationContext(), domains.getDomains().get(0)))) { DomainAssociation association = new DomainAssociation(); association.setDomain( Domain.builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index 8c1af736ff3bd..7d05e0862a96d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.FORMS_ASPECT_NAME; import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; @@ -7,6 +8,8 @@ import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; @@ -20,10 +23,11 @@ import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.key.DomainKey; import com.linkedin.structured.StructuredProperties; +import javax.annotation.Nullable; public class DomainMapper { - public static Domain map(final EntityResponse entityResponse) { + public static Domain map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Domain result = new Domain(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -49,7 +53,8 @@ public static Domain map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { result.setOwnership( - OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + OwnershipMapper.map( + context, new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedInstitutionalMemory = @@ -57,14 +62,16 @@ public static Domain map(final EntityResponse entityResponse) { if (envelopedInstitutionalMemory != null) { result.setInstitutionalMemory( InstitutionalMemoryMapper.map( - new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + context, + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), + entityUrn)); } final EnvelopedAspect envelopedStructuredProps = aspects.get(STRUCTURED_PROPERTIES_ASPECT_NAME); if (envelopedStructuredProps != null) { result.setStructuredProperties( StructuredPropertiesMapper.map( - new StructuredProperties(envelopedStructuredProps.getValue().data()))); + context, new StructuredProperties(envelopedStructuredProps.getValue().data()))); } final EnvelopedAspect envelopedForms = aspects.get(FORMS_ASPECT_NAME); @@ -73,7 +80,11 @@ public static Domain map(final EntityResponse entityResponse) { FormsMapper.map(new Forms(envelopedForms.getValue().data()), entityUrn.toString())); } - return result; + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(result, Domain.class); + } else { + return result; + } } private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index d18633c763eed..a5d4b0176bde1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -75,7 +75,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : domainUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -85,7 +85,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DomainMapper.map(gmsResult)) + .data(DomainMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -116,7 +116,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), Constants.DOMAIN_ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } private Urn getUrn(final String urnStr) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java index b942ff2325bf7..8afdd3f60e220 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeEntityMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.EntityTypeEntity; import com.linkedin.datahub.graphql.generated.EntityTypeInfo; @@ -11,17 +12,20 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EntityTypeEntityMapper implements ModelMapper { public static final EntityTypeEntityMapper INSTANCE = new EntityTypeEntityMapper(); - public static EntityTypeEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static EntityTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public EntityTypeEntity apply(@Nonnull final EntityResponse entityResponse) { + public EntityTypeEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final EntityTypeEntity result = new EntityTypeEntity(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.ENTITY_TYPE); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java index aa5dfc13ea757..b8f7816df97e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/entitytype/EntityTypeType.java @@ -68,7 +68,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(EntityTypeEntityMapper.map(gmsResult)) + .data(EntityTypeEntityMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java index 8d657a33ff651..53d76de1a1fd2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/CreateERModelRelationshipResolver.java @@ -88,7 +88,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro try { log.debug("Create ERModelRelation input: {}", input); final Collection proposals = - ERModelRelationshipUpdateInputMapper.map(input, actor); + ERModelRelationshipUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); try { _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); @@ -96,6 +96,7 @@ public CompletableFuture get(DataFetchingEnvironment enviro throw new RuntimeException("Failed to create erModelRelationship entity", e); } return ERModelRelationMapper.map( + context, _erModelRelationshipService.getERModelRelationshipResponse( Urn.createFromString(inputUrn.toString()), authentication)); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java index 684680597f54d..12294b51654a6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/ERModelRelationshipType.java @@ -117,7 +117,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(ERModelRelationMapper.map(gmsResult)) + .data(ERModelRelationMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -145,7 +145,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Nonnull @@ -154,7 +154,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c throws Exception { final StringArray result = _entityClient.getBrowsePaths(UrnUtils.getUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -174,7 +174,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -188,7 +188,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } public static boolean canUpdateERModelRelation( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java index 14d3a14fd6c42..3e3ea0216c734 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/UpdateERModelRelationshipResolver.java @@ -41,7 +41,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw try { log.debug("Create ERModelRelation input: {}", input); final Collection proposals = - ERModelRelationshipUpdateInputMapper.map(input, actor); + ERModelRelationshipUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(inputUrn)); try { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java index f8649cadca9c4..50a7b7f895fe6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationMapper.java @@ -10,6 +10,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.ERModelRelationship; import com.linkedin.datahub.graphql.generated.EntityType; @@ -31,6 +32,7 @@ import java.util.List; import java.util.Objects; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
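Several hunks in this patch (DatasetMapper, VersionedDatasetMapper, GlossaryTermMapper, and the ERModelRelationMapper hunk below) also switch mappingHelper.mapToResult(...) to an overload that passes the query context through to the per-aspect consumer, so handlers such as DatasetMapper::mapDomains or this::mapProperties can accept (context, entity, dataMap). A rough, self-contained sketch of what such a helper overload looks like; AspectRouter and its nested types are hypothetical names, not the actual MappingHelper implementation.

import java.util.Map;
import java.util.function.BiConsumer;
import javax.annotation.Nullable;

/** Hypothetical stand-in for MappingHelper: routes raw aspect data to per-aspect consumers. */
public class AspectRouter<C, E> {

  /** Consumer variant that also receives the (possibly null) query context. */
  public interface ContextAwareConsumer<C, E, D> {
    void accept(@Nullable C context, E entity, D data);
  }

  private final Map<String, Object> aspects; // aspect name -> raw aspect data
  private final E result;

  public AspectRouter(Map<String, Object> aspects, E result) {
    this.aspects = aspects;
    this.result = result;
  }

  // Original form: the consumer sees only the entity under construction and the raw data.
  public void mapToResult(String aspectName, BiConsumer<E, Object> consumer) {
    Object data = aspects.get(aspectName);
    if (data != null) {
      consumer.accept(result, data);
    }
  }

  // Overload used in these hunks: the context is forwarded as the first argument.
  public void mapToResult(
      @Nullable C context, String aspectName, ContextAwareConsumer<C, E, Object> consumer) {
    Object data = aspects.get(aspectName);
    if (data != null) {
      consumer.accept(context, result, data);
    }
  }

  public E getResult() {
    return result;
  }
}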
@@ -41,12 +43,14 @@ public class ERModelRelationMapper implements ModelMapper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(ER_MODEL_RELATIONSHIP_KEY_ASPECT_NAME, this::mapERModelRelationKey); - mappingHelper.mapToResult(ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, this::mapProperties); + mappingHelper.mapToResult( + context, ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME, this::mapProperties); if (aspectMap != null && aspectMap.containsKey(EDITABLE_ER_MODEL_RELATIONSHIP_PROPERTIES_ASPECT_NAME)) { mappingHelper.mapToResult( @@ -67,31 +72,34 @@ public ERModelRelationship apply(final EntityResponse entityResponse) { INSTITUTIONAL_MEMORY_ASPECT_NAME, (ermodelrelation, dataMap) -> ermodelrelation.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); } if (aspectMap != null && aspectMap.containsKey(OWNERSHIP_ASPECT_NAME)) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (ermodelrelation, dataMap) -> - ermodelrelation.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + ermodelrelation.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); } if (aspectMap != null && aspectMap.containsKey(STATUS_ASPECT_NAME)) { mappingHelper.mapToResult( STATUS_ASPECT_NAME, (ermodelrelation, dataMap) -> - ermodelrelation.setStatus(StatusMapper.map(new Status(dataMap)))); + ermodelrelation.setStatus(StatusMapper.map(context, new Status(dataMap)))); } if (aspectMap != null && aspectMap.containsKey(GLOBAL_TAGS_ASPECT_NAME)) { mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (ermodelrelation, dataMap) -> this.mapGlobalTags(ermodelrelation, dataMap, entityUrn)); + (ermodelrelation, dataMap) -> + this.mapGlobalTags(context, ermodelrelation, dataMap, entityUrn)); } if (aspectMap != null && aspectMap.containsKey(GLOSSARY_TERMS_ASPECT_NAME)) { mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (ermodelrelation, dataMap) -> ermodelrelation.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); } return mappingHelper.getResult(); } @@ -114,7 +122,9 @@ private void mapERModelRelationKey( } private void mapProperties( - @Nonnull ERModelRelationship ermodelrelation, @Nonnull DataMap dataMap) { + @Nullable final QueryContext context, + @Nonnull ERModelRelationship ermodelrelation, + @Nonnull DataMap dataMap) { final ERModelRelationshipProperties ermodelrelationProperties = new ERModelRelationshipProperties(dataMap); ermodelrelation.setProperties( @@ -138,7 +148,7 @@ private void mapProperties( ermodelrelation .getProperties() .setCreatedActor( - UrnToEntityMapper.map(ermodelrelationProperties.getCreated().getActor())); + UrnToEntityMapper.map(context, ermodelrelationProperties.getCreated().getActor())); } } @@ -175,11 +185,12 @@ private List mapERModelRelationFieldMappings( } private void mapGlobalTags( + @Nullable final QueryContext context, @Nonnull ERModelRelationship ermodelrelation, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlobalTags globalTags = - GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn); ermodelrelation.setTags(globalTags); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java index 7c957bab77b68..d18a3e741c433 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ermodelrelationship/mappers/ERModelRelationshipUpdateInputMapper.java @@ -6,6 +6,7 @@ import com.linkedin.common.urn.DatasetUrn; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ERModelRelationshipEditablePropertiesUpdate; import com.linkedin.datahub.graphql.generated.ERModelRelationshipPropertiesInput; import com.linkedin.datahub.graphql.generated.ERModelRelationshipUpdateInput; @@ -25,6 +26,7 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class ERModelRelationshipUpdateInputMapper implements InputModelMapper< @@ -33,13 +35,15 @@ public class ERModelRelationshipUpdateInputMapper new ERModelRelationshipUpdateInputMapper(); public static Collection map( + @Nullable final QueryContext context, @Nonnull final ERModelRelationshipUpdateInput ermodelrelationUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(ermodelrelationUpdateInput, actor); + return INSTANCE.apply(context, ermodelrelationUpdateInput, actor); } @Override - public Collection apply(ERModelRelationshipUpdateInput input, Urn actor) { + public Collection apply( + @Nullable final QueryContext context, ERModelRelationshipUpdateInput input, Urn actor) { final Collection proposals = new ArrayList<>(8); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(ER_MODEL_RELATIONSHIP_ENTITY_NAME); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java index a0ddd4a5883d2..4f2ae014995de 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormMapper.java @@ -6,6 +6,7 @@ import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpGroup; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -26,16 +27,18 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class FormMapper implements ModelMapper { public static final FormMapper INSTANCE = new FormMapper(); - public static Form map(@Nonnull final EntityResponse form) { - return INSTANCE.apply(form); + public static Form map(@Nullable final QueryContext context, @Nonnull final EntityResponse form) { + return INSTANCE.apply(context, form); } - public Form apply(@Nonnull final EntityResponse entityResponse) { + public Form apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { Form result = new Form(); Urn 
entityUrn = entityResponse.getUrn(); result.setUrn(entityUrn.toString()); @@ -47,7 +50,7 @@ public Form apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (form, dataMap) -> - form.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + form.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java index 8a09cee353cc9..5edee2b1dcdeb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/form/FormType.java @@ -67,7 +67,9 @@ public List> batchLoad( gmsResult -> gmsResult == null ? null - : DataFetcherResult.
newResult().data(FormMapper.map(gmsResult)).build()) + : DataFetcherResult.newResult() + .data(FormMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Forms", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index fc8a3768771a5..91eb843030576 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -72,7 +72,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } @@ -82,7 +82,7 @@ public List> batchLoad( gmsGlossaryNode == null ? null : DataFetcherResult.newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) + .data(GlossaryNodeMapper.map(context, gmsGlossaryNode)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index b6b813cddf99b..9d697cd9220b7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -96,7 +96,7 @@ public List> batchLoad( ASPECTS_TO_RESOLVE, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : glossaryTermUrns) { gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); } @@ -106,7 +106,7 @@ public List> batchLoad( gmsGlossaryTerm == null ? 
null : DataFetcherResult.newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) + .data(GlossaryTermMapper.map(context, gmsGlossaryTerm)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -131,7 +131,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -145,7 +145,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "glossaryTerm", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -167,7 +167,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -176,6 +176,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon final StringArray result = _entityClient.getBrowsePaths( GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index a3ce6276e89d7..4912d18614f41 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Forms; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.GlossaryNodeProperties; @@ -21,17 +24,20 @@ import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GlossaryNodeMapper implements ModelMapper { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); - public static GlossaryNode map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static GlossaryNode map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { + public GlossaryNode apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { GlossaryNode result = new GlossaryNode(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.GLOSSARY_NODE); @@ -47,18 +53,23 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - 
glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + glossaryNode.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), GlossaryNode.class); + } else { + return mappingHelper.getResult(); + } } private GlossaryNodeProperties mapGlossaryNodeProperties( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index 14958b10a9bdc..1274646f45ec4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.Deprecation; @@ -9,6 +10,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; @@ -27,6 +30,7 @@ import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
@@ -37,12 +41,14 @@ public class GlossaryTermMapper implements ModelMapper - glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + glossaryTerm.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, this::mapDomains); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + glossaryTerm.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> dataset.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> @@ -95,7 +103,11 @@ public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { if (result.getProperties() != null && result.getProperties().getName() == null) { result.getProperties().setName(legacyName); } - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), GlossaryTerm.class); + } else { + return mappingHelper.getResult(); + } } private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { @@ -104,8 +116,11 @@ private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull Dat glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); } - private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + private void mapDomains( + @Nullable QueryContext context, + @Nonnull GlossaryTerm glossaryTerm, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); - glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); + glossaryTerm.setDomain(DomainAssociationMapper.map(context, domains, glossaryTerm.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index 68475a2599158..705b924d208ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.GlossaryTerm; @@ -20,18 +23,23 @@ public class GlossaryTermsMapper { public static final 
GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); public static GlossaryTerms map( + @Nonnull final QueryContext context, @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(glossaryTerms, entityUrn); + return INSTANCE.apply(context, glossaryTerms, entityUrn); } public GlossaryTerms apply( + @Nonnull final QueryContext context, @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms(); result.setTerms( glossaryTerms.getTerms().stream() + .filter( + association -> + context == null || canView(context.getOperationContext(), association.getUrn())) .map(association -> this.mapGlossaryTermAssociation(association, entityUrn)) .collect(Collectors.toList())); return result; @@ -41,12 +49,14 @@ private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossa @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); + final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); resultGlossaryTerm.setUrn(input.getUrn().toString()); resultGlossaryTerm.setName( GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); result.setTerm(resultGlossaryTerm); + if (input.hasActor()) { CorpUser actor = new CorpUser(); actor.setUrn(input.getActor().toString()); @@ -56,6 +66,7 @@ private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossa if (entityUrn != null) { result.setAssociatedUrn(entityUrn.toString()); } + return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java index f3824f3237617..c2aae4bd27d54 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Incident; import com.linkedin.datahub.graphql.generated.IncidentSource; @@ -16,11 +17,12 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.incident.IncidentInfo; import com.linkedin.metadata.Constants; +import javax.annotation.Nullable; /** Maps a GMS {@link EntityResponse} to a GraphQL incident. */ public class IncidentMapper { - public static Incident map(final EntityResponse entityResponse) { + public static Incident map(@Nullable QueryContext context, final EntityResponse entityResponse) { final Incident result = new Incident(); final Urn entityUrn = entityResponse.getUrn(); final EnvelopedAspectMap aspects = entityResponse.getAspects(); @@ -37,14 +39,14 @@ public static Incident map(final EntityResponse entityResponse) { result.setDescription(info.getDescription(GetMode.NULL)); result.setPriority(info.getPriority(GetMode.NULL)); // TODO: Support multiple entities per incident. 
- result.setEntity(UrnToEntityMapper.map(info.getEntities().get(0))); + result.setEntity(UrnToEntityMapper.map(context, info.getEntities().get(0))); if (info.hasSource()) { - result.setSource(mapIncidentSource(info.getSource())); + result.setSource(mapIncidentSource(context, info.getSource())); } if (info.hasStatus()) { - result.setStatus(mapStatus(info.getStatus())); + result.setStatus(mapStatus(context, info.getStatus())); } - result.setCreated(AuditStampMapper.map(info.getCreated())); + result.setCreated(AuditStampMapper.map(context, info.getCreated())); } else { throw new RuntimeException(String.format("Incident does not exist!. urn: %s", entityUrn)); } @@ -52,20 +54,20 @@ public static Incident map(final EntityResponse entityResponse) { } private static IncidentStatus mapStatus( - final com.linkedin.incident.IncidentStatus incidentStatus) { + @Nullable QueryContext context, final com.linkedin.incident.IncidentStatus incidentStatus) { final IncidentStatus result = new IncidentStatus(); result.setState(IncidentState.valueOf(incidentStatus.getState().name())); result.setMessage(incidentStatus.getMessage(GetMode.NULL)); - result.setLastUpdated(AuditStampMapper.map(incidentStatus.getLastUpdated())); + result.setLastUpdated(AuditStampMapper.map(context, incidentStatus.getLastUpdated())); return result; } private static IncidentSource mapIncidentSource( - final com.linkedin.incident.IncidentSource incidentSource) { + @Nullable QueryContext context, final com.linkedin.incident.IncidentSource incidentSource) { final IncidentSource result = new IncidentSource(); result.setType(IncidentSourceType.valueOf(incidentSource.getType().name())); if (incidentSource.hasSourceUrn()) { - result.setSource(UrnToEntityMapper.map(incidentSource.getSourceUrn())); + result.setSource(UrnToEntityMapper.map(context, incidentSource.getSourceUrn())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java index 2e62bf5a0c345..0ef204f4073ad 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/incident/IncidentType.java @@ -68,7 +68,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(IncidentMapper.map(gmsResult)) + .data(IncidentMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java index 621fcf5f04140..2b576230c99a2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java @@ -1,28 +1,32 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.query.AutoCompleteResult; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class AutoCompleteResultsMapper implements ModelMapper { public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); - public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { - return INSTANCE.apply(results); + public static AutoCompleteResults map( + @Nullable final QueryContext context, @Nonnull final AutoCompleteResult results) { + return INSTANCE.apply(context, results); } @Override - public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { + public AutoCompleteResults apply( + @Nullable final QueryContext context, @Nonnull final AutoCompleteResult input) { final AutoCompleteResults result = new AutoCompleteResults(); result.setQuery(input.getQuery()); result.setSuggestions(input.getSuggestions()); result.setEntities( input.getEntities().stream() - .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .map(entity -> UrnToEntityMapper.map(context, entity.getUrn())) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java index 689ff82147e15..2d5deec7edb51 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java @@ -1,22 +1,24 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.datahub.graphql.Constants; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePath; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathMapper implements ModelMapper { public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); - public static BrowsePath map(@Nonnull final String input) { - return INSTANCE.apply(input); + public static BrowsePath map(@Nullable final QueryContext context, @Nonnull final String input) { + return INSTANCE.apply(context, input); } @Override - public BrowsePath apply(@Nonnull final String input) { + public BrowsePath apply(@Nullable final QueryContext context, @Nonnull final String input) { final BrowsePath browsePath = new BrowsePath(); 
final List path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java index ae70823d675d8..bb70e1ae4b77d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java @@ -1,23 +1,27 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowsePath; import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowsePathsMapper implements ModelMapper, List> { public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); - public static List map(@Nonnull final List input) { - return INSTANCE.apply(input); + public static List map( + @Nullable final QueryContext context, @Nonnull final List input) { + return INSTANCE.apply(context, input); } @Override - public List apply(@Nonnull final List input) { + public List apply( + @Nullable final QueryContext context, @Nonnull final List input) { List results = new ArrayList<>(); for (String pathStr : input) { - results.add(BrowsePathMapper.map(pathStr)); + results.add(BrowsePathMapper.map(context, pathStr)); } return results; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java index 5cac03b19a74c..3c2661a80b873 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroup; import com.linkedin.datahub.graphql.generated.BrowseResultMetadata; import com.linkedin.datahub.graphql.generated.BrowseResults; @@ -8,11 +9,13 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class BrowseResultMapper { private BrowseResultMapper() {} - public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) { + public static BrowseResults map( + @Nullable final QueryContext context, com.linkedin.metadata.browse.BrowseResult input) { final BrowseResults result = new BrowseResults(); if (!input.hasFrom() || !input.hasPageSize() || !input.hasNumElements()) { @@ -24,13 +27,14 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) result.setTotal(input.getNumElements()); final BrowseResultMetadata browseResultMetadata = new BrowseResultMetadata(); - browseResultMetadata.setPath(BrowsePathMapper.map(input.getMetadata().getPath()).getPath()); + browseResultMetadata.setPath( + BrowsePathMapper.map(context, input.getMetadata().getPath()).getPath()); browseResultMetadata.setTotalNumEntities(input.getMetadata().getTotalNumEntities()); result.setMetadata(browseResultMetadata); List entities = input.getEntities().stream() - .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .map(entity -> 
UrnToEntityMapper.map(context, entity.getUrn())) .collect(Collectors.toList()); result.setEntities(entities); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java index c58341f994d4f..984ef0fdcf254 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nullable; + /** Maps an input of type I to an output of type O with actor context. */ public interface InputModelMapper { - O apply(final I input, final A actor); + O apply(@Nullable final QueryContext context, final I input, final A actor); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 701e2b3e0c595..3cae0155a86db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -4,6 +4,7 @@ import static com.linkedin.metadata.utils.SearchUtil.*; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregationMetadata; import com.linkedin.datahub.graphql.generated.FacetMetadata; import com.linkedin.datahub.graphql.generated.MatchedField; @@ -12,7 +13,6 @@ import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.entitytype.EntityTypeMapper; import com.linkedin.metadata.entity.validation.ValidationUtils; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; import java.net.URISyntaxException; @@ -20,7 +20,7 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; -import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -28,14 +28,16 @@ public class MapperUtils { private MapperUtils() {} - public static SearchResult mapResult(SearchEntity searchEntity) { + public static SearchResult mapResult( + @Nullable final QueryContext context, SearchEntity searchEntity) { return new SearchResult( - UrnToEntityMapper.map(searchEntity.getEntity()), + UrnToEntityMapper.map(context, searchEntity.getEntity()), getInsightsFromFeatures(searchEntity.getFeatures()), - getMatchedFieldEntry(searchEntity.getMatchedFields())); + getMatchedFieldEntry(context, searchEntity.getMatchedFields())); } public static FacetMetadata mapFacet( + @Nullable final QueryContext context, com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { final FacetMetadata facetMetadata = new FacetMetadata(); List aggregationFacets = @@ -57,7 +59,7 @@ public static FacetMetadata mapFacet( filterValue.getFacetCount(), filterValue.getEntity() == null ? 
null - : UrnToEntityMapper.map(filterValue.getEntity()))) + : UrnToEntityMapper.map(context, filterValue.getEntity()))) .collect(Collectors.toList())); return facetMetadata; } @@ -73,8 +75,8 @@ public static String convertFilterValue(String filterValue, List isEnti .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } - @Deprecated public static List getMatchedFieldEntry( + @Nullable final QueryContext context, List highlightMetadata) { return highlightMetadata.stream() .map( @@ -85,30 +87,9 @@ public static List getMatchedFieldEntry( if (SearchUtils.isUrn(field.getValue())) { try { Urn urn = Urn.createFromString(field.getValue()); - matchedField.setEntity(UrnToEntityMapper.map(urn)); - } catch (URISyntaxException e) { - log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); - } - } - return matchedField; - }) - .collect(Collectors.toList()); - } - - public static List getMatchedFieldEntry( - @Nonnull EntityRegistry entityRegistry, - List highlightMetadata) { - return highlightMetadata.stream() - .map( - field -> { - MatchedField matchedField = new MatchedField(); - matchedField.setName(field.getName()); - matchedField.setValue(field.getValue()); - if (SearchUtils.isUrn(field.getValue())) { - try { - Urn urn = Urn.createFromString(field.getValue()); - ValidationUtils.validateUrn(entityRegistry, urn); - matchedField.setEntity(UrnToEntityMapper.map(urn)); + ValidationUtils.validateUrn( + context.getOperationContext().getEntityRegistry(), urn); + matchedField.setEntity(UrnToEntityMapper.map(context, urn)); } catch (IllegalArgumentException | URISyntaxException e) { log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java index 2167be9f27ca8..8df26365c45aa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java @@ -1,6 +1,10 @@ package com.linkedin.datahub.graphql.types.mappers; +import com.linkedin.datahub.graphql.QueryContext; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; + /** Simple interface for classes capable of mapping an input of type I to an output of type O. 
*/ public interface ModelMapper { - O apply(final I input); + O apply(@Nullable final QueryContext context, @Nonnull final I input); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java index baf632ae8bdf4..88214ac999a7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.ScrollAcrossLineageResults; @@ -14,14 +15,16 @@ import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnScrollAcrossLineageResultsMapper { public static ScrollAcrossLineageResults map( - LineageScrollResult searchResult) { - return new UrnScrollAcrossLineageResultsMapper().apply(searchResult); + @Nullable final QueryContext context, LineageScrollResult searchResult) { + return new UrnScrollAcrossLineageResultsMapper().apply(context, searchResult); } - public ScrollAcrossLineageResults apply(LineageScrollResult input) { + public ScrollAcrossLineageResults apply( + @Nullable final QueryContext context, LineageScrollResult input) { final ScrollAcrossLineageResults result = new ScrollAcrossLineageResults(); result.setNextScrollId(input.getScrollId()); @@ -30,28 +33,33 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + input.getEntities().stream().map(r -> mapResult(context, r)).collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> mapFacet(context, f)) .collect(Collectors.toList())); return result; } - private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { + private SearchAcrossLineageResult mapResult( + @Nullable final QueryContext context, LineageSearchEntity searchEntity) { return SearchAcrossLineageResult.builder() - .setEntity(UrnToEntityMapper.map(searchEntity.getEntity())) + .setEntity(UrnToEntityMapper.map(context, searchEntity.getEntity())) .setInsights(getInsightsFromFeatures(searchEntity.getFeatures())) - .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) - .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) + .setMatchedFields(getMatchedFieldEntry(context, searchEntity.getMatchedFields())) + .setPaths( + searchEntity.getPaths().stream() + .map(p -> mapPath(context, p)) + .collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) .build(); } - private EntityPath mapPath(UrnArray path) { + private EntityPath mapPath(@Nullable final QueryContext context, UrnArray path) { EntityPath entityPath = new EntityPath(); - 
entityPath.setPath(path.stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + entityPath.setPath( + path.stream().map(p -> UrnToEntityMapper.map(context, p)).collect(Collectors.toList())); return entityPath; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java index 72eb71cd095bb..10d17bf1756e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ScrollResults; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnScrollResultsMapper { public static ScrollResults map( + @Nullable final QueryContext context, com.linkedin.metadata.search.ScrollResult scrollResult) { - return new UrnScrollResultsMapper().apply(scrollResult); + return new UrnScrollResultsMapper().apply(context, scrollResult); } - public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { + public ScrollResults apply( + @Nullable final QueryContext context, com.linkedin.metadata.search.ScrollResult input) { final ScrollResults result = new ScrollResults(); if (!input.hasScrollId() && (!input.hasPageSize() || !input.hasNumEntities())) { @@ -25,10 +29,12 @@ public ScrollResults apply(com.linkedin.metadata.search.ScrollResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + input.getEntities().stream() + .map(r -> MapperUtils.mapResult(context, r)) + .collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index 970789facf699..b39b960bb7580 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.FreshnessStats; @@ -16,14 +17,16 @@ import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnSearchAcrossLineageResultsMapper { public static SearchAcrossLineageResults map( - LineageSearchResult 
searchResult) { - return new UrnSearchAcrossLineageResultsMapper().apply(searchResult); + @Nullable final QueryContext context, LineageSearchResult searchResult) { + return new UrnSearchAcrossLineageResultsMapper().apply(context, searchResult); } - public SearchAcrossLineageResults apply(LineageSearchResult input) { + public SearchAcrossLineageResults apply( + @Nullable final QueryContext context, LineageSearchResult input) { final SearchAcrossLineageResults result = new SearchAcrossLineageResults(); result.setStart(input.getFrom()); @@ -32,10 +35,10 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + input.getEntities().stream().map(r -> mapResult(context, r)).collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); if (input.hasFreshness()) { @@ -55,20 +58,25 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { return result; } - private SearchAcrossLineageResult mapResult(LineageSearchEntity searchEntity) { + private SearchAcrossLineageResult mapResult( + @Nullable final QueryContext context, LineageSearchEntity searchEntity) { return SearchAcrossLineageResult.builder() - .setEntity(UrnToEntityMapper.map(searchEntity.getEntity())) + .setEntity(UrnToEntityMapper.map(context, searchEntity.getEntity())) .setInsights(getInsightsFromFeatures(searchEntity.getFeatures())) - .setMatchedFields(getMatchedFieldEntry(searchEntity.getMatchedFields())) - .setPaths(searchEntity.getPaths().stream().map(this::mapPath).collect(Collectors.toList())) + .setMatchedFields(getMatchedFieldEntry(context, searchEntity.getMatchedFields())) + .setPaths( + searchEntity.getPaths().stream() + .map(p -> mapPath(context, p)) + .collect(Collectors.toList())) .setDegree(searchEntity.getDegree()) .setDegrees(searchEntity.getDegrees().stream().collect(Collectors.toList())) .build(); } - private EntityPath mapPath(UrnArray path) { + private EntityPath mapPath(@Nullable final QueryContext context, UrnArray path) { EntityPath entityPath = new EntityPath(); - entityPath.setPath(path.stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); + entityPath.setPath( + path.stream().map(p -> UrnToEntityMapper.map(context, p)).collect(Collectors.toList())); return entityPath; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java index d814c44e469bc..c7c50c8f40c15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; +import javax.annotation.Nullable; public class UrnSearchResultsMapper { public static SearchResults map( + @Nullable final 
QueryContext context, com.linkedin.metadata.search.SearchResult searchResult) { - return new UrnSearchResultsMapper().apply(searchResult); + return new UrnSearchResultsMapper().apply(context, searchResult); } - public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { + public SearchResults apply( + @Nullable final QueryContext context, com.linkedin.metadata.search.SearchResult input) { final SearchResults result = new SearchResults(); if (!input.hasFrom() || !input.hasPageSize() || !input.hasNumEntities()) { @@ -25,10 +29,12 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { final SearchResultMetadata searchResultMetadata = input.getMetadata(); result.setSearchResults( - input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + input.getEntities().stream() + .map(r -> MapperUtils.mapResult(context, r)) + .collect(Collectors.toList())); result.setFacets( searchResultMetadata.getAggregations().stream() - .map(MapperUtils::mapFacet) + .map(f -> MapperUtils.mapFacet(context, f)) .collect(Collectors.toList())); result.setSuggestions( searchResultMetadata.getSuggestions().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index b55f574045393..b8781b1230350 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -91,7 +91,7 @@ public List> batchLoad( gmsMlFeatureTable == null ? null : DataFetcherResult.newResult() - .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) + .data(MLFeatureTableMapper.map(context, gmsMlFeatureTable)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -116,7 +116,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -130,7 +130,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "mlFeatureTable", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -152,7 +152,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -160,6 +160,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon throws Exception { final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java index 7046ee0f94eeb..bbfa92ae45465 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java @@ -81,7 +81,7 @@ public List> batchLoad( gmsMlFeature == null ? 
null : DataFetcherResult.newResult() - .data(MLFeatureMapper.map(gmsMlFeature)) + .data(MLFeatureMapper.map(context, gmsMlFeature)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -106,7 +106,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -120,6 +120,6 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "mlFeature", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java index 8865d2acce12d..24179ffd96426 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java @@ -91,7 +91,7 @@ public List> batchLoad( gmsMlModelGroup == null ? null : DataFetcherResult.newResult() - .data(MLModelGroupMapper.map(gmsMlModelGroup)) + .data(MLModelGroupMapper.map(context, gmsMlModelGroup)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -116,7 +116,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -130,7 +130,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "mlModelGroup", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -152,7 +152,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -161,6 +161,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon final StringArray result = _entityClient.getBrowsePaths( MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java index a1c689d9f5c1d..c3d29c91b0598 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java @@ -86,7 +86,7 @@ public List> batchLoad( gmsMlModel == null ? 
null : DataFetcherResult.newResult() - .data(MLModelMapper.map(gmsMlModel)) + .data(MLModelMapper.map(context, gmsMlModel)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -111,7 +111,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -124,7 +124,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete(context.getOperationContext(), "mlModel", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -146,7 +146,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -154,6 +154,6 @@ public List browsePaths(@Nonnull String urn, @Nonnull final QueryCon throws Exception { final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index cccb05e8fa0f5..4e1ef996ecc0d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -59,6 +59,7 @@ public Class objectClass() { @Override public List> batchLoad( final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlPrimaryKeyUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); @@ -81,7 +82,7 @@ public List> batchLoad( gmsMlPrimaryKey == null ? 
null : DataFetcherResult.newResult() - .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) + .data(MLPrimaryKeyMapper.map(context, gmsMlPrimaryKey)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -106,7 +107,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -120,6 +121,6 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), "mlPrimaryKey", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java index 7db1216e1390d..6485313b030cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java @@ -1,18 +1,22 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BaseData; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class BaseDataMapper implements ModelMapper { public static final BaseDataMapper INSTANCE = new BaseDataMapper(); - public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { - return INSTANCE.apply(input); + public static BaseData map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.BaseData input) { + return INSTANCE.apply(context, input); } @Override - public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { + public BaseData apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.BaseData input) { final BaseData result = new BaseData(); result.setDataset(input.getDataset().toString()); result.setMotivation(input.getMotivation()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java index 108717f325f68..b3b642ec9f126 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class CaveatsAndRecommendationsMapper @@ -12,16 +14,18 @@ public class CaveatsAndRecommendationsMapper new CaveatsAndRecommendationsMapper(); public static CaveatsAndRecommendations map( + @Nullable QueryContext context, @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - return INSTANCE.apply(caveatsAndRecommendations); + return INSTANCE.apply(context, 
caveatsAndRecommendations); } @Override public CaveatsAndRecommendations apply( + @Nullable QueryContext context, com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); if (caveatsAndRecommendations.getCaveats() != null) { - result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); + result.setCaveats(CaveatsDetailsMapper.map(context, caveatsAndRecommendations.getCaveats())); } if (caveatsAndRecommendations.getRecommendations() != null) { result.setRecommendations(caveatsAndRecommendations.getRecommendations()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java index 2226197e673f5..9b89e95520546 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CaveatDetails; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class CaveatsDetailsMapper @@ -9,12 +11,14 @@ public class CaveatsDetailsMapper public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); - public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - return INSTANCE.apply(input); + public static CaveatDetails map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + return INSTANCE.apply(context, input); } @Override - public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + public CaveatDetails apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.CaveatDetails input) { final CaveatDetails result = new CaveatDetails(); result.setCaveatDescription(input.getCaveatDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java index 8959e59265e14..4d0983177fb74 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EthicalConsiderations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class EthicalConsiderationsMapper @@ -10,12 +12,14 @@ public class EthicalConsiderationsMapper public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); public static EthicalConsiderations map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - return INSTANCE.apply(ethicalConsiderations); + 
return INSTANCE.apply(context, ethicalConsiderations); } @Override public EthicalConsiderations apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { final EthicalConsiderations result = new EthicalConsiderations(); result.setData(ethicalConsiderations.getData()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java index 212db94081371..442ce052c1c8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.HyperParameterMap; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.ml.metadata.HyperParameterValueTypeMap; +import javax.annotation.Nullable; import lombok.NonNull; public class HyperParameterMapMapper @@ -10,17 +12,19 @@ public class HyperParameterMapMapper public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); - public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { - return INSTANCE.apply(input); + public static HyperParameterMap map( + @Nullable QueryContext context, @NonNull final HyperParameterValueTypeMap input) { + return INSTANCE.apply(context, input); } @Override - public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { + public HyperParameterMap apply( + @Nullable QueryContext context, @NonNull final HyperParameterValueTypeMap input) { final HyperParameterMap result = new HyperParameterMap(); for (String key : input.keySet()) { result.setKey(key); - result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); + result.setValue(HyperParameterValueTypeMapper.map(context, input.get(key))); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java index 81849df320e57..8b5bc445a3609 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BooleanBox; import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.HyperParameterValueType; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class HyperParameterValueTypeMapper @@ -15,12 +17,14 @@ public class HyperParameterValueTypeMapper public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); public static HyperParameterValueType map( + 
@Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - return INSTANCE.apply(input); + return INSTANCE.apply(context, input); } @Override public HyperParameterValueType apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { HyperParameterValueType result = null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java index 9f724ae71a55e..6a7d1aae7679b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.IntendedUse; import com.linkedin.datahub.graphql.generated.IntendedUserType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class IntendedUseMapper @@ -11,12 +13,16 @@ public class IntendedUseMapper public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); - public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - return INSTANCE.apply(intendedUse); + public static IntendedUse map( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + return INSTANCE.apply(context, intendedUse); } @Override - public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + public IntendedUse apply( + @Nullable QueryContext context, + @NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { final IntendedUse result = new IntendedUse(); result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); result.setPrimaryUses(intendedUse.getPrimaryUses()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java index 93c8d0197136b..a4f3aa7a0e226 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -14,6 +15,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; @@ -40,18 +43,21 @@ import com.linkedin.ml.metadata.MLFeatureProperties; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps 
Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLFeatureMapper implements ModelMapper { public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); - public static MLFeature map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLFeature map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLFeature apply(@Nonnull final EntityResponse entityResponse) { + public MLFeature apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLFeature result = new MLFeature(); Urn entityUrn = entityResponse.getUrn(); @@ -62,88 +68,101 @@ public MLFeature apply(@Nonnull final EntityResponse entityResponse) { result.setLastIngested(lastIngested); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); + mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, MLFeatureMapper::mapMLFeatureKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); + mlFeature.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, ML_FEATURE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapMLFeatureProperties); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> mlFeature.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlFeature, dataMap) -> mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlFeature, dataMap) -> + mlFeature.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlFeature.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLFeatureMapper::mapDomains); mappingHelper.mapToResult( - ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, MLFeatureMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> - 
entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + entity.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((mlFeature, dataMap) -> mlFeature.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLFeature.class); + } else { + return mappingHelper.getResult(); + } } - private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + private static void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); mlFeature.setName(mlFeatureKey.getName()); mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); } - private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + private static void mapMLFeatureProperties( + @Nullable final QueryContext context, + @Nonnull MLFeature mlFeature, + @Nonnull DataMap dataMap) { MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); + mlFeature.setProperties(MLFeaturePropertiesMapper.map(context, featureProperties)); mlFeature.setDescription(featureProperties.getDescription()); if (featureProperties.getDataType() != null) { mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); } } - private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLFeature entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull MLFeature entity, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLFeature entity, DataMap dataMap) { + private static void mapEditableProperties(MLFeature entity, DataMap dataMap) { EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 7bcefbc305192..92d090275867d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLFeaturePropertiesMapper @@ -13,12 +15,14 @@ public class MLFeaturePropertiesMapper public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); public static MLFeatureProperties map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(mlFeatureProperties); + return INSTANCE.apply(context, mlFeatureProperties); } @Override public MLFeatureProperties apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { final MLFeatureProperties result = new MLFeatureProperties(); @@ -27,7 +31,7 @@ public MLFeatureProperties apply( result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); } if (mlFeatureProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); + result.setVersion(VersionTagMapper.map(context, mlFeatureProperties.getVersion())); } if (mlFeatureProperties.getSources() != null) { result.setSources( diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index 22f1eaeb39082..30bf4dda1cf4f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -14,6 +15,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import 
com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeatureTable; @@ -40,18 +43,21 @@ import com.linkedin.ml.metadata.MLFeatureTableProperties; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLFeatureTableMapper implements ModelMapper { public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); - public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLFeatureTable map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { + public MLFeatureTable apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLFeatureTable result = new MLFeatureTable(); Urn entityUrn = entityResponse.getUrn(); @@ -65,56 +71,63 @@ public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mlFeatureTable.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); mappingHelper.mapToResult( ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, - (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); + (entity, dataMap) -> this.mapMLFeatureTableProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> mlFeatureTable.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap)))); + mlFeatureTable.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlFeatureTable.setDeprecation( + DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLFeatureTableMapper::mapDomains); mappingHelper.mapToResult( ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); mappingHelper.mapToResult( 
DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> - entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + entity.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((mlFeatureTable, dataMap) -> mlFeatureTable.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLFeatureTable.class); + } else { + return mappingHelper.getResult(); + } } private void mapMLFeatureTableKey( @@ -126,27 +139,34 @@ private void mapMLFeatureTableKey( mlFeatureTable.setPlatform(partialPlatform); } - private void mapMLFeatureTableProperties( - @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { + private static void mapMLFeatureTableProperties( + @Nullable final QueryContext context, + @Nonnull MLFeatureTable mlFeatureTable, + @Nonnull DataMap dataMap, + Urn entityUrn) { MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); mlFeatureTable.setFeatureTableProperties( - MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); mlFeatureTable.setProperties( - MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + MLFeatureTablePropertiesMapper.map(context, featureTableProperties, entityUrn)); mlFeatureTable.setDescription(featureTableProperties.getDescription()); } - private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLFeatureTable entity, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index fff504d43c81a..d9fed13ed0d0b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -1,11 +1,15 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLFeatureTablePropertiesMapper { @@ -14,12 +18,14 @@ public class MLFeatureTablePropertiesMapper { new MLFeatureTablePropertiesMapper(); public static MLFeatureTableProperties map( + @Nullable final QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + return INSTANCE.apply(context, mlFeatureTableProperties, entityUrn); } - public MLFeatureTableProperties apply( + public static MLFeatureTableProperties apply( + @Nullable final QueryContext context, @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { final MLFeatureTableProperties result = new MLFeatureTableProperties(); @@ -28,6 +34,7 @@ public MLFeatureTableProperties apply( if (mlFeatureTableProperties.getMlFeatures() != null) { result.setMlFeatures( mlFeatureTableProperties.getMlFeatures().stream() + .filter(f -> context == null || canView(context.getOperationContext(), f)) .map( urn -> { final MLFeature mlFeature = new MLFeature(); @@ -40,6 +47,7 @@ public MLFeatureTableProperties apply( if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { result.setMlPrimaryKeys( mlFeatureTableProperties.getMlPrimaryKeys().stream() + .filter(k -> context == null || canView(context.getOperationContext(), k)) .map( urn -> { final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java index bb3c85e411e71..37989b3bda827 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.generated.MLHyperParam; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MLHyperParamMapper @@ -9,12 +11,14 @@ public class MLHyperParamMapper public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); - public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - return INSTANCE.apply(input); + public static MLHyperParam map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + return INSTANCE.apply(context, input); } @Override - public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + public MLHyperParam apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLHyperParam input) { final MLHyperParam result = new MLHyperParam(); result.setDescription(input.getDescription()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java index 765a44d218567..80ebabec283bb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLMetric; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MLMetricMapper implements ModelMapper { public static final MLMetricMapper INSTANCE = new MLMetricMapper(); - public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - return INSTANCE.apply(metric); + public static MLMetric map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLMetric metric) { + return INSTANCE.apply(context, metric); } @Override - public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + public MLMetric apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLMetric metric) { final MLMetric result = new MLMetric(); result.setDescription(metric.getDescription()); result.setValue(metric.getValue()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java index e86072ce3848e..4316251a464f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelFactorPromptsMapper @@ -11,24 +13,26 @@ public class MLModelFactorPromptsMapper public static final MLModelFactorPromptsMapper INSTANCE = new 
MLModelFactorPromptsMapper(); public static MLModelFactorPrompts map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - return INSTANCE.apply(input); + return INSTANCE.apply(context, input); } @Override public MLModelFactorPrompts apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); if (input.getEvaluationFactors() != null) { mlModelFactorPrompts.setEvaluationFactors( input.getEvaluationFactors().stream() - .map(MLModelFactorsMapper::map) + .map(f -> MLModelFactorsMapper.map(context, f)) .collect(Collectors.toList())); } if (input.getRelevantFactors() != null) { mlModelFactorPrompts.setRelevantFactors( input.getRelevantFactors().stream() - .map(MLModelFactorsMapper::map) + .map(f -> MLModelFactorsMapper.map(context, f)) .collect(Collectors.toList())); } return mlModelFactorPrompts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java index 3b212eca52801..5607ef8c2cf13 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelFactors; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.ArrayList; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelFactorsMapper @@ -11,12 +13,14 @@ public class MLModelFactorsMapper public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); public static MLModelFactors map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { - return INSTANCE.apply(modelFactors); + return INSTANCE.apply(context, modelFactors); } @Override public MLModelFactors apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { final MLModelFactors result = new MLModelFactors(); if (mlModelFactors.getEnvironment() != null) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index 352884a0f5e38..7e99040e44c82 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -13,6 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import 
com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; @@ -39,18 +42,21 @@ import com.linkedin.ml.metadata.MLModelGroupProperties; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelGroupMapper implements ModelMapper { public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); - public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLModelGroup map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { + public MLModelGroup apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLModelGroup result = new MLModelGroup(); Urn entityUrn = entityResponse.getUrn(); @@ -64,52 +70,62 @@ public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); + mlModelGroup.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( - ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); + ML_MODEL_GROUP_KEY_ASPECT_NAME, MLModelGroupMapper::mapToMLModelGroupKey); + mappingHelper.mapToResult( + context, + ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, + MLModelGroupMapper::mapToMLModelGroupProperties); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlModelGroup, dataMap) -> mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlModelGroup, dataMap) -> + mlModelGroup.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlModelGroup.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> MLModelGroupMapper.mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLModelGroupMapper::mapDomains); mappingHelper.mapToResult( - ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, MLModelGroupMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); 
mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mlModelGroup.setBrowsePathV2( + BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((mlModelGroup, dataMap) -> mlModelGroup.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModelGroup.class); + } else { + return mappingHelper.getResult(); + } } - private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { + private static void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); mlModelGroup.setName(mlModelGroupKey.getName()); mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); @@ -118,28 +134,33 @@ private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { mlModelGroup.setPlatform(partialPlatform); } - private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { + private static void mapToMLModelGroupProperties( + @Nullable final QueryContext context, MLModelGroup mlModelGroup, DataMap dataMap) { MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); + mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(context, modelGroupProperties)); if (modelGroupProperties.getDescription() != null) { mlModelGroup.setDescription(modelGroupProperties.getDescription()); } } - private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLModelGroup entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLModelGroup entity, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { + private static void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index bae60a026b49a..9f1918f9ec489 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelGroupPropertiesMapper @@ -11,18 +13,20 @@ public class MLModelGroupPropertiesMapper public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); public static MLModelGroupProperties map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(mlModelGroupProperties); + return INSTANCE.apply(context, mlModelGroupProperties); } @Override public MLModelGroupProperties apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { final MLModelGroupProperties result = new MLModelGroupProperties(); result.setDescription(mlModelGroupProperties.getDescription()); if (mlModelGroupProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); + result.setVersion(VersionTagMapper.map(context, mlModelGroupProperties.getVersion())); } result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 2f2bb3c0caf30..a3bc5c663c89a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -15,6 +16,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.DataPlatform; import 
com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; @@ -53,18 +56,21 @@ import com.linkedin.structured.StructuredProperties; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelMapper implements ModelMapper { public static final MLModelMapper INSTANCE = new MLModelMapper(); - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLModel map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { + public MLModel apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLModel result = new MLModel(); Urn entityUrn = entityResponse.getUrn(); @@ -75,36 +81,36 @@ public MLModel apply(@Nonnull final EntityResponse entityResponse) { result.setLastIngested(lastIngested); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, MLModelMapper::mapMLModelKey); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mlModel.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( ML_MODEL_PROPERTIES_ASPECT_NAME, - (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapMLModelProperties(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); + (mlModel, dataMap) -> mapGlobalTags(context, mlModel, dataMap, entityUrn)); mappingHelper.mapToResult( INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); + mlModel.setIntendedUse(IntendedUseMapper.map(context, new IntendedUse(dataMap)))); mappingHelper.mapToResult( ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setFactorPrompts( - MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); + MLModelFactorPromptsMapper.map(context, new MLModelFactorPrompts(dataMap)))); mappingHelper.mapToResult( METRICS_ASPECT_NAME, - (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); + (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(context, new Metrics(dataMap)))); mappingHelper.mapToResult( EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setEvaluationData( new EvaluationData(dataMap) .getEvaluationData().stream() - .map(BaseDataMapper::map) + .map(d -> BaseDataMapper.map(context, d)) .collect(Collectors.toList()))); mappingHelper.mapToResult( TRAINING_DATA_ASPECT_NAME, @@ -112,69 +118,76 @@ public MLModel apply(@Nonnull final EntityResponse entityResponse) { mlModel.setTrainingData( new TrainingData(dataMap) .getTrainingData().stream() - .map(BaseDataMapper::map) + .map(d -> BaseDataMapper.map(context, d)) .collect(Collectors.toList()))); mappingHelper.mapToResult( QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setQuantitativeAnalyses( - 
QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); + QuantitativeAnalysesMapper.map(context, new QuantitativeAnalyses(dataMap)))); mappingHelper.mapToResult( ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setEthicalConsiderations( - EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); + EthicalConsiderationsMapper.map(context, new EthicalConsiderations(dataMap)))); mappingHelper.mapToResult( CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCaveatsAndRecommendations( - CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); + CaveatsAndRecommendationsMapper.map( + context, new CaveatsAndRecommendations(dataMap)))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(context, SOURCE_CODE_ASPECT_NAME, MLModelMapper::mapSourceCode); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( - COST_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(new Cost(dataMap)))); + COST_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(context, new Cost(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlModel.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLModelMapper::mapDomains); mappingHelper.mapToResult( - ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, MLModelMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((dataset, dataMap) -> dataset.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return 
AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLModel.class); + } else { + return mappingHelper.getResult(); + } } - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + private static void mapMLModelKey(MLModel mlModel, DataMap dataMap) { MLModelKey mlModelKey = new MLModelKey(dataMap); mlModel.setName(mlModelKey.getName()); mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); @@ -183,40 +196,44 @@ private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { mlModel.setPlatform(partialPlatform); } - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + private static void mapMLModelProperties( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap, Urn entityUrn) { MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); + mlModel.setProperties(MLModelPropertiesMapper.map(context, modelProperties, entityUrn)); if (modelProperties.getDescription() != null) { mlModel.setDescription(modelProperties.getDescription()); } } - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); mlModel.setGlobalTags(graphQlGlobalTags); mlModel.setTags(graphQlGlobalTags); } - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { + private static void mapSourceCode( + @Nullable final QueryContext context, MLModel mlModel, DataMap dataMap) { SourceCode sourceCode = new SourceCode(dataMap); com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = new com.linkedin.datahub.graphql.generated.SourceCode(); graphQlSourceCode.setSourceCode( sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map) + .map(c -> SourceCodeUrlMapper.map(context, c)) .collect(Collectors.toList())); mlModel.setSourceCode(graphQlSourceCode); } - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull MLModel entity, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLModel entity, DataMap dataMap) { + private static void mapEditableProperties(MLModel entity, DataMap dataMap) { EditableMLModelProperties input = new EditableMLModelProperties(dataMap); MLModelEditableProperties editableProperties = new MLModelEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index f2781f5bca5c8..a89904b3ab915 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,10 +1,14 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; + import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MLModelGroup; import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLModelPropertiesMapper { @@ -12,12 +16,16 @@ public class MLModelPropertiesMapper { public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); public static MLModelProperties map( - @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return INSTANCE.apply(mlModelProperties, entityUrn); + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + Urn entityUrn) { + return INSTANCE.apply(context, mlModelProperties, entityUrn); } public MLModelProperties apply( - @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + @Nullable final QueryContext context, + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, + Urn entityUrn) { final MLModelProperties result = new MLModelProperties(); result.setDate(mlModelProperties.getDate()); @@ -32,7 +40,7 @@ public MLModelProperties apply( if (mlModelProperties.getHyperParams() != null) { result.setHyperParams( mlModelProperties.getHyperParams().stream() - .map(param -> MLHyperParamMapper.map(param)) + .map(param -> MLHyperParamMapper.map(context, param)) .collect(Collectors.toList())); } @@ -42,13 +50,14 @@ public MLModelProperties apply( if (mlModelProperties.getTrainingMetrics() != null) { result.setTrainingMetrics( mlModelProperties.getTrainingMetrics().stream() - .map(metric -> MLMetricMapper.map(metric)) + .map(metric -> MLMetricMapper.map(context, metric)) .collect(Collectors.toList())); } if (mlModelProperties.getGroups() != null) { result.setGroups( mlModelProperties.getGroups().stream() + .filter(g -> context == null || canView(context.getOperationContext(), g)) .map( group -> { final MLModelGroup subgroup = new MLModelGroup(); @@ -61,6 +70,7 @@ public MLModelProperties apply( if (mlModelProperties.getMlFeatures() != null) { result.setMlFeatures( mlModelProperties.getMlFeatures().stream() + 
.filter(f -> context == null || canView(context.getOperationContext(), f)) .map(Urn::toString) .collect(Collectors.toList())); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index a5556a6a9a0c8..36784f96ea30e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.DataPlatformInstance; @@ -13,6 +14,8 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; @@ -38,18 +41,21 @@ import com.linkedin.ml.metadata.MLPrimaryKeyProperties; import com.linkedin.structured.StructuredProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLPrimaryKeyMapper implements ModelMapper { public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static MLPrimaryKey map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { + public MLPrimaryKey apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final MLPrimaryKey result = new MLPrimaryKey(); Urn entityUrn = entityResponse.getUrn(); @@ -63,61 +69,73 @@ public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); + mlPrimaryKey.setOwnership( + OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( - ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); + ML_PRIMARY_KEY_KEY_ASPECT_NAME, MLPrimaryKeyMapper::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + context, + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, + MLPrimaryKeyMapper::mapMLPrimaryKeyProperties); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> mlPrimaryKey.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (mlPrimaryKey, dataMap) -> mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); + (mlPrimaryKey, 
dataMap) -> + mlPrimaryKey.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mlPrimaryKey.setDeprecation(DeprecationMapper.map(context, new Deprecation(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, - (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + (entity, dataMap) -> mapGlobalTags(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> entity.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, MLPrimaryKeyMapper::mapDomains); mappingHelper.mapToResult( - ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, MLPrimaryKeyMapper::mapEditableProperties); mappingHelper.mapToResult( DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> dataset.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap)))); mappingHelper.mapToResult( STRUCTURED_PROPERTIES_ASPECT_NAME, ((entity, dataMap) -> entity.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); mappingHelper.mapToResult( FORMS_ASPECT_NAME, ((entity, dataMap) -> entity.setForms(FormsMapper.map(new Forms(dataMap), entityUrn.toString())))); - return mappingHelper.getResult(); + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), MLPrimaryKey.class); + } else { + return mappingHelper.getResult(); + } } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + private static void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + private static void mapMLPrimaryKeyProperties( + @Nullable final QueryContext context, MLPrimaryKey mlPrimaryKey, DataMap dataMap) { MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setPrimaryKeyProperties( + MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(context, primaryKeyProperties)); mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); if (primaryKeyProperties.getDataType() != null) { mlPrimaryKey.setDataType( @@ -125,20 +143,24 @@ private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMa } } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + private static void mapGlobalTags( + @Nullable final QueryContext context, 
MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { GlobalTags globalTags = new GlobalTags(dataMap); com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = - GlobalTagsMapper.map(globalTags, entityUrn); + GlobalTagsMapper.map(context, globalTags, entityUrn); entity.setTags(graphQlGlobalTags); } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, + @Nonnull MLPrimaryKey entity, + @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + entity.setDomain(DomainAssociationMapper.map(context, domains, entity.getUrn())); } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + private static void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); if (input.hasDescription()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 16d6120cd9dff..09e41fe7ee4e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -1,10 +1,12 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; +import javax.annotation.Nullable; import lombok.NonNull; public class MLPrimaryKeyPropertiesMapper @@ -14,12 +16,14 @@ public class MLPrimaryKeyPropertiesMapper public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); public static MLPrimaryKeyProperties map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); + return INSTANCE.apply(context, mlPrimaryKeyProperties); } @Override public MLPrimaryKeyProperties apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); @@ -29,7 +33,7 @@ public MLPrimaryKeyProperties apply( MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); } if (mlPrimaryKeyProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); + result.setVersion(VersionTagMapper.map(context, mlPrimaryKeyProperties.getVersion())); } result.setSources( mlPrimaryKeyProperties.getSources().stream() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 
76fa8c84e9571..ce6357655dfbf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class MetricsMapper implements ModelMapper { public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); + public static Metrics map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(context, metrics); } @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + public Metrics apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.Metrics metrics) { final Metrics result = new Metrics(); result.setDecisionThreshold(metrics.getDecisionThreshold()); result.setPerformanceMeasures(metrics.getPerformanceMeasures()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index e46cb0a074bd7..fbb259666c273 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class QuantitativeAnalysesMapper @@ -10,17 +12,20 @@ public class QuantitativeAnalysesMapper public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); public static QuantitativeAnalyses map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); + return INSTANCE.apply(context, quantitativeAnalyses); } @Override public QuantitativeAnalyses apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { final QuantitativeAnalyses result = new QuantitativeAnalyses(); result.setIntersectionalResults( - ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); + ResultsTypeMapper.map(context, quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults( + ResultsTypeMapper.map(context, quantitativeAnalyses.getUnitaryResults())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 
4b6529c59db3e..e73f80511fbaa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nullable; import lombok.NonNull; public class ResultsTypeMapper @@ -10,12 +12,14 @@ public class ResultsTypeMapper public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); + public static ResultsType map( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(context, input); } @Override - public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + public ResultsType apply( + @Nullable QueryContext context, @NonNull final com.linkedin.ml.metadata.ResultsType input) { final ResultsType result; if (input.isString()) { result = new StringBox(input.getString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index b6bd5efdc4217..1b0695e599349 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,20 +1,24 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SourceCodeUrlMapper implements ModelMapper { public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); + public static SourceCodeUrl map( + @Nullable QueryContext context, @Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(context, input); } @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + public SourceCodeUrl apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { final SourceCodeUrl results = new SourceCodeUrl(); results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 5758a52538c1e..4020ef6e35ece 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -1,20 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; import com.linkedin.common.VersionTag; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class VersionTagMapper implements ModelMapper { public static final VersionTagMapper INSTANCE = new VersionTagMapper(); public static com.linkedin.datahub.graphql.generated.VersionTag map( - @Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); + @Nullable QueryContext context, @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(context, versionTag); } @Override - public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { + public com.linkedin.datahub.graphql.generated.VersionTag apply( + @Nullable QueryContext context, @Nonnull final VersionTag input) { final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); result.setVersionTag(input.getVersionTag()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index a8e964581dfd5..e40690d58eb1c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -100,7 +100,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -114,7 +114,7 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), NOTEBOOK_ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -139,7 +139,7 @@ public BrowseResults browse( facetFilters, start, count); - return BrowseResultMapper.map(result); + return BrowseResultMapper.map(context, result); } @Override @@ -148,7 +148,7 @@ public List browsePaths(@Nonnull String urn, @Nonnull QueryContext c final StringArray result = _entityClient.getBrowsePaths( NotebookUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); + return BrowsePathsMapper.map(context, result); } @Override @@ -185,7 +185,7 @@ public List> batchLoad( entityResponse == null ? null : DataFetcherResult.newResult() - .data(NotebookMapper.map(entityResponse)) + .data(NotebookMapper.map(context, entityResponse)) .build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -207,9 +207,9 @@ public Notebook update( "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - Collection proposals = NotebookUpdateInputMapper.map(input, actor); + CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + Collection proposals = + NotebookUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { @@ -227,7 +227,7 @@ private boolean isAuthorized( final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.NOTEBOOK_PRIVILEGES.getResourceType(), urn, orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index a263e31b26faf..109006f9d4a90 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.canView; import static com.linkedin.metadata.Constants.*; import com.linkedin.common.BrowsePathsV2; @@ -12,6 +13,8 @@ import com.linkedin.common.SubTypes; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; import com.linkedin.datahub.graphql.generated.ChartCell; @@ -46,16 +49,17 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class NotebookMapper implements ModelMapper { public static final NotebookMapper INSTANCE = new NotebookMapper(); - public static Notebook map(EntityResponse response) { - return INSTANCE.apply(response); + public static Notebook map(@Nullable final QueryContext context, EntityResponse response) { + return INSTANCE.apply(context, response); } @Override - public Notebook apply(EntityResponse response) { + public Notebook apply(@Nullable final QueryContext context, EntityResponse response) { final Notebook convertedNotebook = new Notebook(); Urn entityUrn = response.getUrn(); @@ -63,45 +67,54 @@ public Notebook apply(EntityResponse response) { convertedNotebook.setType(EntityType.NOTEBOOK); EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); - mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); + mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, NotebookMapper::mapNotebookKey); mappingHelper.mapToResult( NOTEBOOK_INFO_ASPECT_NAME, - (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); + (entity, dataMap) -> mapNotebookInfo(context, entity, dataMap, entityUrn)); mappingHelper.mapToResult( - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, 
this::mapEditableNotebookProperties); + context, NOTEBOOK_CONTENT_ASPECT_NAME, NotebookMapper::mapNotebookContent); + mappingHelper.mapToResult( + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, NotebookMapper::mapEditableNotebookProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> - notebook.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + notebook.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(context, new Status(dataMap)))); mappingHelper.mapToResult( GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + notebook.setTags(GlobalTagsMapper.map(context, new GlobalTags(dataMap), entityUrn))); mappingHelper.mapToResult( INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> notebook.setInstitutionalMemory( - InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); + InstitutionalMemoryMapper.map( + context, new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(context, DOMAINS_ASPECT_NAME, NotebookMapper::mapDomains); + mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, NotebookMapper::mapSubTypes); mappingHelper.mapToResult( GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> notebook.setGlossaryTerms( - GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); + GlossaryTermsMapper.map(context, new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + context, DATA_PLATFORM_INSTANCE_ASPECT_NAME, NotebookMapper::mapDataPlatformInstance); mappingHelper.mapToResult( BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(context, new BrowsePathsV2(dataMap)))); + + if (context != null && !canView(context.getOperationContext(), entityUrn)) { + return AuthorizationUtils.restrictEntity(mappingHelper.getResult(), Notebook.class); + } else { + return mappingHelper.getResult(); + } } - private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { + private static void mapDataPlatformInstance( + @Nullable final QueryContext context, Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); notebook.setPlatform( DataPlatform.builder() @@ -109,10 +122,10 @@ private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { .setUrn(dataPlatformInstance.getPlatform().toString()) .build()); notebook.setDataPlatformInstance( - DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); + DataPlatformInstanceAspectMapper.map(context, new DataPlatformInstance(dataMap))); } - private void mapSubTypes(Notebook notebook, DataMap dataMap) { + private static void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { com.linkedin.datahub.graphql.generated.SubTypes subTypes = @@ -122,20 +135,23 @@ private void mapSubTypes(Notebook 
notebook, DataMap dataMap) { } } - private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final NotebookKey notebookKey = new NotebookKey(dataMap); notebook.setNotebookId(notebookKey.getNotebookId()); notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo( - @Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { + private static void mapNotebookInfo( + @Nullable final QueryContext context, + @Nonnull Notebook notebook, + @Nonnull DataMap dataMap, + Urn entityUrn) { final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); notebookInfo.setTitle(gmsNotebookInfo.getTitle()); notebookInfo.setChangeAuditStamps( - ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -149,16 +165,17 @@ private void mapNotebookInfo( notebook.setInfo(notebookInfo); } - private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapNotebookContent( + @Nullable final QueryContext context, @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { com.linkedin.notebook.NotebookContent pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); - notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); + notebookContent.setCells(mapNotebookCells(context, pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List mapNotebookCells( - com.linkedin.notebook.NotebookCellArray pegasusCells) { + private static List mapNotebookCells( + @Nullable final QueryContext context, com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() .map( pegasusCell -> { @@ -168,13 +185,13 @@ private List mapNotebookCells( notebookCell.setType(cellType); switch (cellType) { case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); + notebookCell.setChartCell(mapChartCell(context, pegasusCell.getChartCell())); break; case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); + notebookCell.setTextCell(mapTextCell(context, pegasusCell.getTextCell())); break; case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); + notebookCell.setQueryChell(mapQueryCell(context, pegasusCell.getQueryCell())); break; default: throw new DataHubGraphQLException( @@ -186,39 +203,43 @@ private List mapNotebookCells( .collect(Collectors.toList()); } - private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) { + private static ChartCell mapChartCell( + @Nullable final QueryContext context, com.linkedin.notebook.ChartCell pegasusChartCell) { ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); chartCell.setChangeAuditStamps( - ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, pegasusChartCell.getChangeAuditStamps())); return chartCell; } - private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { + private static TextCell 
mapTextCell( + @Nullable final QueryContext context, com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); textCell.setChangeAuditStamps( - ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } - private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) { + private static QueryCell mapQueryCell( + @Nullable final QueryContext context, com.linkedin.notebook.QueryCell pegasusQueryCell) { QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); queryCell.setChangeAuditStamps( - ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + ChangeAuditStampsMapper.map(context, pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { - queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); + queryCell.setLastExecuted(AuditStampMapper.map(context, pegasusQueryCell.getLastExecuted())); } return queryCell; } - private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapEditableNotebookProperties( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); @@ -226,9 +247,10 @@ private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull notebook.setEditableProperties(notebookEditableProperties); } - private void mapDomains(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { + private static void mapDomains( + @Nullable final QueryContext context, @Nonnull Notebook notebook, @Nonnull DataMap dataMap) { final Domains domains = new Domains(dataMap); // Currently we only take the first domain if it exists. 
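The NotebookMapper hunk above also converts the per-aspect helpers from instance methods (this::mapDomains) to static ones and, wherever a helper now needs the query context, replaces the method reference with a lambda that captures it. A small sketch of why that rewrite is needed, assuming a plain BiConsumer-based helper rather than DataHub's MappingHelper:

import java.util.function.BiConsumer;

public class ContextCaptureSketch {
  record Ctx(String actorUrn) {}

  static void mapTitle(StringBuilder target, String dataMap) {
    target.append(dataMap);
  }

  static void mapOwner(Ctx context, StringBuilder target, String dataMap) {
    target.append(" owned-by ").append(context.actorUrn()).append(':').append(dataMap);
  }

  public static void main(String[] args) {
    StringBuilder notebook = new StringBuilder();
    Ctx context = new Ctx("urn:li:corpuser:example"); // placeholder actor

    // A two-argument helper still fits the functional interface as a method reference.
    BiConsumer<StringBuilder, String> titleMapper = ContextCaptureSketch::mapTitle;
    titleMapper.accept(notebook, "My notebook");

    // Once the helper gains a context parameter, a bare method reference no longer
    // matches the interface, so the context is captured in a lambda instead.
    BiConsumer<StringBuilder, String> ownerMapper =
        (entity, dataMap) -> mapOwner(context, entity, dataMap);
    ownerMapper.accept(notebook, "analytics-team");

    System.out.println(notebook);
  }
}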
- notebook.setDomain(DomainAssociationMapper.map(domains, notebook.getUrn())); + notebook.setDomain(DomainAssociationMapper.map(context, domains, notebook.getUrn())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0d6c70e07053f..62d1e488482ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -8,6 +8,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.dashboard.EditableDashboardProperties; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipUpdateMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; @@ -18,6 +19,7 @@ import java.util.Collection; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class NotebookUpdateInputMapper implements InputModelMapper, Urn> { @@ -25,12 +27,15 @@ public class NotebookUpdateInputMapper public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); public static Collection map( - @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { - return INSTANCE.apply(notebookUpdateInput, actor); + @Nullable final QueryContext context, + @Nonnull final NotebookUpdateInput notebookUpdateInput, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, notebookUpdateInput, actor); } @Override - public Collection apply(NotebookUpdateInput input, Urn actor) { + public Collection apply( + @Nullable final QueryContext context, NotebookUpdateInput input, Urn actor) { final Collection proposals = new ArrayList<>(3); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(NOTEBOOK_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -40,7 +45,8 @@ public Collection apply(NotebookUpdateInput input, Urn a if (input.getOwnership() != null) { proposals.add( updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(input.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + OwnershipUpdateMapper.map(context, input.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { @@ -48,7 +54,7 @@ public Collection apply(NotebookUpdateInput input, Urn a globalTags.setTags( new TagAssociationArray( input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) + .map(t -> TagAssociationUpdateMapper.map(context, t)) .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index f7ed4c59a805a..6b78ba113225e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -69,7 +69,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(OwnershipTypeMapper.map(gmsResult)) + .data(OwnershipTypeMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 9eebe95df8d8c..76d41897dafd6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; @@ -15,17 +16,19 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class OwnershipTypeMapper implements ModelMapper { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); - public static OwnershipTypeEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static OwnershipTypeEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { + public OwnershipTypeEntity apply(@Nullable QueryContext context, @Nonnull EntityResponse input) { final OwnershipTypeEntity result = new OwnershipTypeEntity(); result.setUrn(input.getUrn().toString()); @@ -35,7 +38,7 @@ public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); mappingHelper.mapToResult( STATUS_ASPECT_NAME, - (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(context, new Status(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 318818b8a2140..e40ae84f2c131 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.DataHubPolicy; import com.linkedin.datahub.graphql.generated.EntityType; @@ -26,28 +27,32 @@ import java.net.URISyntaxException; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataHubPolicyMapper implements ModelMapper { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); - public static DataHubPolicy map(@Nonnull final 
EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubPolicy map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubPolicy apply(@Nonnull final EntityResponse entityResponse) { + public DataHubPolicy apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataHubPolicy result = new DataHubPolicy(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.DATAHUB_POLICY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATAHUB_POLICY_INFO_ASPECT_NAME, this::mapDataHubPolicyInfo); + mappingHelper.mapToResult(context, DATAHUB_POLICY_INFO_ASPECT_NAME, this::mapDataHubPolicyInfo); return mappingHelper.getResult(); } - private void mapDataHubPolicyInfo(@Nonnull DataHubPolicy policy, @Nonnull DataMap dataMap) { + private void mapDataHubPolicyInfo( + @Nullable QueryContext context, @Nonnull DataHubPolicy policy, @Nonnull DataMap dataMap) { DataHubPolicyInfo policyInfo = new DataHubPolicyInfo(dataMap); policy.setDescription(policyInfo.getDescription()); // Careful - we assume no other Policy types or states have been ingested using a backdoor. @@ -58,7 +63,7 @@ private void mapDataHubPolicyInfo(@Nonnull DataHubPolicy policy, @Nonnull DataMa policy.setActors(mapActors(policyInfo.getActors())); policy.setEditable(policyInfo.isEditable()); if (policyInfo.hasResources()) { - policy.setResources(mapResources(policyInfo.getResources())); + policy.setResources(mapResources(context, policyInfo.getResources())); } } @@ -88,7 +93,8 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { return result; } - private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) { + private ResourceFilter mapResources( + @Nullable QueryContext context, final DataHubResourceFilter resourceFilter) { final ResourceFilter result = new ResourceFilter(); result.setAllResources(resourceFilter.isAllResources()); if (resourceFilter.hasType()) { @@ -98,12 +104,13 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) result.setResources(resourceFilter.getResources()); } if (resourceFilter.hasFilter()) { - result.setFilter(mapFilter(resourceFilter.getFilter())); + result.setFilter(mapFilter(context, resourceFilter.getFilter())); } return result; } - private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { + private PolicyMatchFilter mapFilter( + @Nullable QueryContext context, final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() .setCriteria( filter.getCriteria().stream() @@ -113,7 +120,7 @@ private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter .setField(criterion.getField()) .setValues( criterion.getValues().stream() - .map(this::mapValue) + .map(c -> mapValue(context, c)) .collect(Collectors.toList())) .setCondition( PolicyMatchCondition.valueOf(criterion.getCondition().name())) @@ -122,13 +129,13 @@ private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter .build(); } - private PolicyMatchCriterionValue mapValue(final String value) { + private PolicyMatchCriterionValue mapValue(@Nullable QueryContext context, final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); return 
PolicyMatchCriterionValue.builder() .setValue(value) - .setEntity(UrnToEntityMapper.map(urn)) + .setEntity(UrnToEntityMapper.map(context, urn)) .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 3dea9046dcf36..ac7cd2bc0a83c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -66,7 +66,7 @@ public List> batchLoad( gmsResult == null ? null : DataFetcherResult.newResult() - .data(DataHubPolicyMapper.map(gmsResult)) + .data(DataHubPolicyMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index f35111f78a694..674011a4f2f28 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Media; @@ -17,17 +18,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class PostMapper implements ModelMapper { public static final PostMapper INSTANCE = new PostMapper(); - public static Post map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Post map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Post apply(@Nonnull final EntityResponse entityResponse) { + public Post apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Post result = new Post(); result.setUrn(entityResponse.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index 2e504f6406581..e71b569e9ae23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -6,6 +6,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; @@ -25,6 +26,7 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -32,19 +34,21 @@ public class 
QueryMapper implements ModelMapper { public static final QueryMapper INSTANCE = new QueryMapper(); - public static QueryEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static QueryEntity map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { + public QueryEntity apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final QueryEntity result = new QueryEntity(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.QUERY); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); + mappingHelper.mapToResult(context, QUERY_PROPERTIES_ASPECT_NAME, this::mapQueryProperties); mappingHelper.mapToResult(QUERY_SUBJECTS_ASPECT_NAME, this::mapQuerySubjects); mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapPlatform); return mappingHelper.getResult(); @@ -60,7 +64,8 @@ private void mapPlatform(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { } } - private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { + private void mapQueryProperties( + @Nullable final QueryContext context, @Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); @@ -74,7 +79,7 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); if (queryProperties.hasOrigin() && queryProperties.getOrigin() != null) { - res.setOrigin(UrnToEntityMapper.map(queryProperties.getOrigin())); + res.setOrigin(UrnToEntityMapper.map(context, queryProperties.getOrigin())); } AuditStamp created = new AuditStamp(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index 087c93a97e314..602b3699d11e4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -60,7 +60,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : viewUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } @@ -70,7 +70,7 @@ public List> batchLoad( gmsResult == null ? 
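One incidental change in the QueryType hunk above is sizing the results list up front with new ArrayList<>(urns.size()) instead of growing it lazily. A tiny illustration of the idea, independent of the DataHub types:

import java.util.ArrayList;
import java.util.List;

public class PresizedListSketch {
  public static void main(String[] args) {
    int expected = 10_000; // stand-in for urns.size()

    // Pre-sizing reserves capacity once, so the backing array is never reallocated
    // while the batch results are appended in order.
    List<Integer> results = new ArrayList<>(expected);
    for (int i = 0; i < expected; i++) {
      results.add(i);
    }
    System.out.println(results.size());
  }
}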
null : DataFetcherResult.newResult() - .data(QueryMapper.map(gmsResult)) + .data(QueryMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index db086e682d57c..b20e78e149c3e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataFlowDataJobsRelationshipsMapper implements ModelMapper { @@ -12,17 +14,19 @@ public class DataFlowDataJobsRelationshipsMapper new DataFlowDataJobsRelationshipsMapper(); public static DataFlowDataJobsRelationships map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); + return INSTANCE.apply(context, relationships); } @Override public DataFlowDataJobsRelationships apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships input) { final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); result.setEntities( input.getRelationships().stream() - .map(EntityRelationshipLegacyMapper::map) + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 4df64c7ecb85e..6a03a060c3687 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DownstreamEntityRelationshipsMapper implements ModelMapper { @@ -12,17 +14,19 @@ public class DownstreamEntityRelationshipsMapper new DownstreamEntityRelationshipsMapper(); public static DownstreamEntityRelationships map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); + return INSTANCE.apply(context, relationships); } @Override public DownstreamEntityRelationships apply( + @Nullable QueryContext context, @Nonnull final 
com.linkedin.common.EntityRelationships input) { final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); result.setEntities( input.getRelationships().stream() - .map(EntityRelationshipLegacyMapper::map) + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index e3743804b4908..7ab37031d824c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy; import com.linkedin.datahub.graphql.generated.EntityWithRelationships; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class EntityRelationshipLegacyMapper implements ModelMapper { @@ -14,22 +16,24 @@ public class EntityRelationshipLegacyMapper new EntityRelationshipLegacyMapper(); public static EntityRelationshipLegacy map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); + return INSTANCE.apply(context, relationship); } @Override public EntityRelationshipLegacy apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationship relationship) { final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); EntityWithRelationships partialLineageEntity = - (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); + (EntityWithRelationships) UrnToEntityMapper.map(context, relationship.getEntity()); if (partialLineageEntity != null) { result.setEntity(partialLineageEntity); } if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); + result.setCreated(AuditStampMapper.map(context, relationship.getCreated())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 832e1bb396b3b..35abc849e8f97 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.relationships.mappers; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import 
javax.annotation.Nullable; public class UpstreamEntityRelationshipsMapper implements ModelMapper { @@ -12,17 +14,19 @@ public class UpstreamEntityRelationshipsMapper new UpstreamEntityRelationshipsMapper(); public static UpstreamEntityRelationships map( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); + return INSTANCE.apply(context, relationships); } @Override public UpstreamEntityRelationships apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.common.EntityRelationships input) { final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); result.setEntities( input.getRelationships().stream() - .map(EntityRelationshipLegacyMapper::map) + .map(r -> EntityRelationshipLegacyMapper.map(context, r)) .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java index 61186fc9f77e5..cf40cc51d1e23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedMapper.java @@ -4,7 +4,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Restricted; import com.linkedin.entity.EntityResponse; -import com.linkedin.metadata.service.RestrictedService; +import io.datahubproject.metadata.services.RestrictedService; import javax.annotation.Nonnull; public class RestrictedMapper { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java index a2030bb596d10..a245397c8d83d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/restricted/RestrictedType.java @@ -9,8 +9,8 @@ import com.linkedin.datahub.graphql.types.EntityType; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.service.RestrictedService; import graphql.execution.DataFetcherResult; +import io.datahubproject.metadata.services.RestrictedService; import java.util.*; import java.util.function.Function; import java.util.stream.Collectors; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 9521945770195..530518d1cd14c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -67,7 +67,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(DataHubRoleMapper.map(gmsResult)) + .data(DataHubRoleMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 7a467886fc084..7ba42b08cdc6a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; @@ -11,17 +12,20 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class DataHubRoleMapper implements ModelMapper { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); - public static DataHubRole map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static DataHubRole map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public DataHubRole apply(@Nonnull final EntityResponse entityResponse) { + public DataHubRole apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final DataHubRole result = new DataHubRole(); result.setUrn(entityResponse.getUrn().toString()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index cc77ee46d65dc..46d494bc9219c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -86,7 +86,9 @@ public List> batchLoad( gmsResult -> gmsResult == null ? 
null - : DataFetcherResult.newResult().data(RoleMapper.map(gmsResult)).build()) + : DataFetcherResult.newResult() + .data(RoleMapper.map(context, gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Role", e); @@ -109,7 +111,7 @@ public SearchResults search( Collections.emptyMap(), start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -123,6 +125,6 @@ public AutoCompleteResults autoComplete( final AutoCompleteResult result = _entityClient.autoComplete( context.getOperationContext(), Constants.ROLE_ENTITY_NAME, query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index df18b7c89fafc..80337cd9a5338 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Actor; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.EntityType; @@ -18,13 +19,15 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class RoleMapper implements ModelMapper { public static final RoleMapper INSTANCE = new RoleMapper(); - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Role map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { @@ -59,7 +62,7 @@ private static List mapRoleUsers(RoleUserArray users) { } @Override - public Role apply(EntityResponse input) { + public Role apply(@Nullable QueryContext context, EntityResponse input) { final Role result = new Role(); final Urn entityUrn = input.getUrn(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java index 254a1ed1767f1..3d1833e9c944a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.STRUCTURED_PROPERTIES_ASPECT_NAME; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; @@ -13,19 +14,22 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.structured.StructuredProperties; import 
javax.annotation.Nonnull; +import javax.annotation.Nullable; public class SchemaFieldMapper implements ModelMapper { public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); - public static SchemaFieldEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static SchemaFieldEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public SchemaFieldEntity apply(@Nonnull final EntityResponse entityResponse) { + public SchemaFieldEntity apply( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { Urn entityUrn = entityResponse.getUrn(); - final SchemaFieldEntity result = this.mapSchemaFieldUrn(entityUrn); + final SchemaFieldEntity result = this.mapSchemaFieldUrn(context, entityUrn); EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); @@ -33,19 +37,19 @@ public SchemaFieldEntity apply(@Nonnull final EntityResponse entityResponse) { STRUCTURED_PROPERTIES_ASPECT_NAME, ((schemaField, dataMap) -> schemaField.setStructuredProperties( - StructuredPropertiesMapper.map(new StructuredProperties(dataMap))))); + StructuredPropertiesMapper.map(context, new StructuredProperties(dataMap))))); return result; } - private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { + private SchemaFieldEntity mapSchemaFieldUrn(@Nullable QueryContext context, Urn urn) { try { SchemaFieldEntity result = new SchemaFieldEntity(); result.setUrn(urn.toString()); result.setType(EntityType.SCHEMA_FIELD); result.setFieldPath(urn.getEntityKey().get(1)); Urn parentUrn = Urn.createFromString(urn.getEntityKey().get(0)); - result.setParent(UrnToEntityMapper.map(parentUrn)); + result.setParent(UrnToEntityMapper.map(context, parentUrn)); return result; } catch (Exception e) { throw new RuntimeException("Failed to load schemaField entity", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index 9f14bf52733ea..6017f368eea24 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -87,7 +87,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(SchemaFieldMapper.map(gmsResult)) + .data(SchemaFieldMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java index ad48067599328..dc1ff7ca32971 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertiesMapper.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.types.structuredproperty; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.NumberValue; @@ -15,6 +16,7 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @Slf4j @@ -23,23 +25,23 @@ public class StructuredPropertiesMapper { public static final StructuredPropertiesMapper INSTANCE = new StructuredPropertiesMapper(); public static com.linkedin.datahub.graphql.generated.StructuredProperties map( - @Nonnull final StructuredProperties structuredProperties) { - return INSTANCE.apply(structuredProperties); + @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { + return INSTANCE.apply(context, structuredProperties); } public com.linkedin.datahub.graphql.generated.StructuredProperties apply( - @Nonnull final StructuredProperties structuredProperties) { + @Nullable QueryContext context, @Nonnull final StructuredProperties structuredProperties) { com.linkedin.datahub.graphql.generated.StructuredProperties result = new com.linkedin.datahub.graphql.generated.StructuredProperties(); result.setProperties( structuredProperties.getProperties().stream() - .map(this::mapStructuredProperty) + .map(p -> mapStructuredProperty(context, p)) .collect(Collectors.toList())); return result; } private StructuredPropertiesEntry mapStructuredProperty( - StructuredPropertyValueAssignment valueAssignment) { + @Nullable QueryContext context, StructuredPropertyValueAssignment valueAssignment) { StructuredPropertiesEntry entry = new StructuredPropertiesEntry(); entry.setStructuredProperty(createStructuredPropertyEntity(valueAssignment)); final List values = new ArrayList<>(); @@ -49,7 +51,7 @@ private StructuredPropertiesEntry mapStructuredProperty( .forEach( value -> { if (value.isString()) { - this.mapStringValue(value.getString(), values, entities); + this.mapStringValue(context, value.getString(), values, entities); } else if (value.isDouble()) { values.add(new NumberValue(value.getDouble())); } @@ -67,11 +69,14 @@ private StructuredPropertyEntity createStructuredPropertyEntity( return entity; } - private void mapStringValue( - String stringValue, List values, List entities) { + private static void mapStringValue( + @Nullable QueryContext context, + String stringValue, + List values, + List entities) { try { final Urn urnValue = Urn.createFromString(stringValue); - entities.add(UrnToEntityMapper.map(urnValue)); + entities.add(UrnToEntityMapper.map(context, urnValue)); } catch (Exception e) { log.debug("String value is 
not an urn for this structured property entry"); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java index 259020b83bee1..b3abab5ed3d36 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyMapper.java @@ -5,6 +5,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.StringArrayMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AllowedValue; import com.linkedin.datahub.graphql.generated.DataTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; @@ -24,6 +25,7 @@ import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class StructuredPropertyMapper implements ModelMapper { @@ -32,12 +34,14 @@ public class StructuredPropertyMapper public static final StructuredPropertyMapper INSTANCE = new StructuredPropertyMapper(); - public static StructuredPropertyEntity map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static StructuredPropertyEntity map( + @Nullable QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public StructuredPropertyEntity apply(@Nonnull final EntityResponse entityResponse) { + public StructuredPropertyEntity apply( + @Nullable QueryContext queryContext, @Nonnull final EntityResponse entityResponse) { final StructuredPropertyEntity result = new StructuredPropertyEntity(); result.setUrn(entityResponse.getUrn().toString()); result.setType(EntityType.STRUCTURED_PROPERTY); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java index b028563b5253c..9d50b7d54e4ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/structuredproperty/StructuredPropertyType.java @@ -69,7 +69,7 @@ public List> batchLoad( gmsResult == null ? 
null : DataFetcherResult.newResult() - .data(StructuredPropertyMapper.map(gmsResult)) + .data(StructuredPropertyMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index a3c9bc380bdcf..8760e62c8206f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -88,7 +88,7 @@ public List> batchLoad( _entityClient.batchGetV2( TAG_ENTITY_NAME, new HashSet<>(tagUrns), null, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : tagUrns) { gmsResults.add(tagMap.getOrDefault(urn, null)); } @@ -97,7 +97,9 @@ public List> batchLoad( gmsTag -> gmsTag == null ? null - : DataFetcherResult.newResult().data(TagMapper.map(gmsTag)).build()) + : DataFetcherResult.newResult() + .data(TagMapper.map(context, gmsTag)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Tags", e); @@ -121,7 +123,7 @@ public SearchResults search( facetFilters, start, count); - return UrnSearchResultsMapper.map(searchResult); + return UrnSearchResultsMapper.map(context, searchResult); } @Override @@ -134,7 +136,7 @@ public AutoCompleteResults autoComplete( throws Exception { final AutoCompleteResult result = _entityClient.autoComplete(context.getOperationContext(), "tag", query, filters, limit); - return AutoCompleteResultsMapper.map(result); + return AutoCompleteResultsMapper.map(context, result); } @Override @@ -142,9 +144,9 @@ public Tag update( @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { if (isAuthorized(input, context)) { - final CorpuserUrn actor = - CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = TagUpdateInputMapper.map(input, actor); + final CorpuserUrn actor = CorpuserUrn.createFromString(context.getActorUrn()); + final Collection proposals = + TagUpdateInputMapper.map(context, input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); @@ -163,7 +165,7 @@ private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryConte final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), + context.getActorUrn(), PoliciesConfig.TAG_PRIVILEGES.getResourceType(), update.getUrn(), orPrivilegeGroups); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index 72665535e5980..cadeef9941034 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -3,37 +3,49 @@ import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; +import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Tag; +import java.util.Optional; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class GlobalTagsMapper { public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); public static com.linkedin.datahub.graphql.generated.GlobalTags map( - @Nonnull final GlobalTags standardTags, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(standardTags, entityUrn); + @Nullable final QueryContext context, + @Nonnull final GlobalTags standardTags, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(context, standardTags, entityUrn); } public com.linkedin.datahub.graphql.generated.GlobalTags apply( - @Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { + @Nullable final QueryContext context, + @Nonnull final GlobalTags input, + @Nonnull final Urn entityUrn) { final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); result.setTags( input.getTags().stream() - .map(tag -> this.mapTagAssociation(tag, entityUrn)) + .map(tag -> mapTagAssociation(context, tag, entityUrn)) + .filter(Optional::isPresent) + .map(Optional::get) .collect(Collectors.toList())); return result; } - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, @Nonnull final Urn entityUrn) { + private static Optional mapTagAssociation( + @Nullable final QueryContext context, + @Nonnull final TagAssociation input, + @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); final Tag resultTag = new Tag(); resultTag.setUrn(input.getTag().toString()); result.setTag(resultTag); result.setAssociatedUrn(entityUrn.toString()); - return result; + return Optional.of(result); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 3792a42376004..cb024fd6953f2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -2,21 +2,26 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.TagUrn; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.net.URISyntaxException; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class TagAssociationUpdateMapper implements ModelMapper { public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); + public static TagAssociation map( + @Nullable final QueryContext context, + @Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(context, tagAssociationUpdate); } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { + public TagAssociation apply( + @Nullable final QueryContext context, final TagAssociationUpdate 
tagAssociationUpdate) { final TagAssociation output = new TagAssociation(); try { output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index d6ce24582678d..d7971d1788c03 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -7,6 +7,7 @@ import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.data.template.RecordTemplate; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Tag; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; @@ -17,6 +18,7 @@ import com.linkedin.metadata.key.TagKey; import com.linkedin.tag.TagProperties; import javax.annotation.Nonnull; +import javax.annotation.Nullable; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. @@ -27,12 +29,14 @@ public class TagMapper implements ModelMapper { public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static Tag map( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(context, entityResponse); } @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { + public Tag apply( + @Nullable final QueryContext context, @Nonnull final EntityResponse entityResponse) { final Tag result = new Tag(); Urn entityUrn = entityResponse.getUrn(); result.setUrn(entityResponse.getUrn().toString()); @@ -43,11 +47,12 @@ public Tag apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, TagMapper::mapTagKey); + mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, TagMapper::mapTagProperties); mappingHelper.mapToResult( OWNERSHIP_ASPECT_NAME, - (tag, dataMap) -> tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + (tag, dataMap) -> + tag.setOwnership(OwnershipMapper.map(context, new Ownership(dataMap), entityUrn))); if (result.getProperties() != null && result.getProperties().getName() == null) { result.getProperties().setName(legacyName); @@ -56,12 +61,12 @@ public Tag apply(@Nonnull final EntityResponse entityResponse) { return mappingHelper.getResult(); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + private static void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { TagKey tagKey = new TagKey(dataMap); tag.setName(tagKey.getName()); } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + private static void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { final TagProperties properties = new TagProperties(dataMap); final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = new 
com.linkedin.datahub.graphql.generated.TagProperties.Builder() diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 316994881ccfe..7e6b7052d683d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -13,6 +13,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.TagUpdateInput; import com.linkedin.datahub.graphql.types.common.mappers.util.UpdateMappingHelper; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; @@ -21,6 +22,7 @@ import java.util.ArrayList; import java.util.Collection; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class TagUpdateInputMapper implements InputModelMapper, Urn> { @@ -28,13 +30,17 @@ public class TagUpdateInputMapper public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection map( - @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { - return INSTANCE.apply(tagUpdate, actor); + @Nullable final QueryContext context, + @Nonnull final TagUpdateInput tagUpdate, + @Nonnull final Urn actor) { + return INSTANCE.apply(context, tagUpdate, actor); } @Override public Collection apply( - @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { + @Nullable final QueryContext context, + @Nonnull final TagUpdateInput tagUpdate, + @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(2); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(TAG_ENTITY_NAME); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index eefcc356c22a3..8608dde75628c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -57,7 +57,7 @@ public List> batchLoad( ASPECTS_TO_FETCH, context.getAuthentication()); - final List gmsResults = new ArrayList<>(); + final List gmsResults = new ArrayList<>(urns.size()); for (Urn urn : testUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java index e4e67c86f1ae6..1bfeeaeea7c36 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java @@ -1,8 +1,10 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.FieldUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class FieldUsageCountsMapper implements ModelMapper { @@ -10,12 +12,15 
@@ public class FieldUsageCountsMapper public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper(); public static FieldUsageCounts map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { - return INSTANCE.apply(usageCounts); + return INSTANCE.apply(context, usageCounts); } @Override - public FieldUsageCounts apply(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + public FieldUsageCounts apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { FieldUsageCounts result = new FieldUsageCounts(); result.setCount(usageCounts.getCount()); result.setFieldName(usageCounts.getFieldName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java index 3449c6782a46b..32ba8f5b80325 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageAggregation; import com.linkedin.datahub.graphql.generated.WindowDuration; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageAggregationMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class UsageAggregationMapper public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper(); public static UsageAggregation map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { - return INSTANCE.apply(pdlUsageAggregation); + return INSTANCE.apply(context, pdlUsageAggregation); } @Override public UsageAggregation apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { UsageAggregation result = new UsageAggregation(); result.setBucket(pdlUsageAggregation.getBucket()); @@ -28,7 +32,8 @@ public UsageAggregation apply( result.setResource(pdlUsageAggregation.getResource().toString()); } if (pdlUsageAggregation.hasMetrics()) { - result.setMetrics(UsageAggregationMetricsMapper.map(pdlUsageAggregation.getMetrics())); + result.setMetrics( + UsageAggregationMetricsMapper.map(context, pdlUsageAggregation.getMetrics())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java index ff9f6fd5c4855..47411d65c7329 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageAggregationMetrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class 
UsageAggregationMetricsMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class UsageAggregationMetricsMapper public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper(); public static UsageAggregationMetrics map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { - return INSTANCE.apply(usageAggregationMetrics); + return INSTANCE.apply(context, usageAggregationMetrics); } @Override public UsageAggregationMetrics apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { UsageAggregationMetrics result = new UsageAggregationMetrics(); result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries()); @@ -25,13 +29,13 @@ public UsageAggregationMetrics apply( if (usageAggregationMetrics.hasFields()) { result.setFields( usageAggregationMetrics.getFields().stream() - .map(FieldUsageCountsMapper::map) + .map(f -> FieldUsageCountsMapper.map(context, f)) .collect(Collectors.toList())); } if (usageAggregationMetrics.hasUsers()) { result.setUsers( usageAggregationMetrics.getUsers().stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .map(aggregation -> UserUsageCountsMapper.map(context, aggregation)) .collect(Collectors.toList())); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java index 63fe051b7ede9..c40126ca32551 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageQueryResultAggregationMapper implements ModelMapper< @@ -13,12 +15,14 @@ public class UsageQueryResultAggregationMapper new UsageQueryResultAggregationMapper(); public static UsageQueryResultAggregations map( + @Nullable final QueryContext context, @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { - return INSTANCE.apply(pdlUsageResultAggregations); + return INSTANCE.apply(context, pdlUsageResultAggregations); } @Override public UsageQueryResultAggregations apply( + @Nullable final QueryContext context, @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { UsageQueryResultAggregations result = new UsageQueryResultAggregations(); result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries()); @@ -26,13 +30,13 @@ public UsageQueryResultAggregations apply( if (pdlUsageResultAggregations.hasFields()) { result.setFields( pdlUsageResultAggregations.getFields().stream() - .map(FieldUsageCountsMapper::map) + .map(f -> FieldUsageCountsMapper.map(context, f)) .collect(Collectors.toList())); } if (pdlUsageResultAggregations.hasUsers()) { result.setUsers( pdlUsageResultAggregations.getUsers().stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .map(aggregation -> 
UserUsageCountsMapper.map(context, aggregation)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index b2758adc9b8b2..eef476959c5fe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,11 +1,13 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UsageQueryResult; import com.linkedin.datahub.graphql.generated.UsageQueryResultAggregations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.List; import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UsageQueryResultMapper implements ModelMapper { @@ -16,21 +18,24 @@ public class UsageQueryResultMapper public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); public static UsageQueryResult map( + @Nullable final QueryContext context, @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { - return INSTANCE.apply(pdlUsageResult); + return INSTANCE.apply(context, pdlUsageResult); } @Override - public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + public UsageQueryResult apply( + @Nullable QueryContext context, + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { UsageQueryResult result = new UsageQueryResult(); if (pdlUsageResult.hasAggregations()) { result.setAggregations( - UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); + UsageQueryResultAggregationMapper.map(context, pdlUsageResult.getAggregations())); } if (pdlUsageResult.hasBuckets()) { result.setBuckets( pdlUsageResult.getBuckets().stream() - .map(bucket -> UsageAggregationMapper.map(bucket)) + .map(bucket -> UsageAggregationMapper.map(context, bucket)) .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java index 014003dd86554..783d44d486368 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java @@ -1,9 +1,11 @@ package com.linkedin.datahub.graphql.types.usage; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +import javax.annotation.Nullable; public class UserUsageCountsMapper implements ModelMapper { @@ -11,12 +13,14 @@ public class UserUsageCountsMapper public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper(); public static UserUsageCounts map( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { - return INSTANCE.apply(pdlUsageResultAggregations); + return INSTANCE.apply(context, 
pdlUsageResultAggregations); } @Override public UserUsageCounts apply( + @Nullable QueryContext context, @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { UserUsageCounts result = new UserUsageCounts(); if (pdlUsageResultAggregations.hasUser()) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index a4bbd685fd4a2..be27f9b0f3c01 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.*; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewDefinition; import com.linkedin.datahub.graphql.generated.DataHubViewFilter; @@ -32,12 +33,14 @@ public class DataHubViewMapper implements ModelMapper> batchLoad( gmsResult == null ? null : DataFetcherResult.newResult() - .data(DataHubViewMapper.map(gmsResult)) + .data(DataHubViewMapper.map(context, gmsResult)) .build()) .collect(Collectors.toList()); } catch (Exception e) { diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index e8c968ab768f8..6608f454ae8c6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -54,11 +54,9 @@ public static QueryContext getMockAllowContext(String actorUrn) { new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds"); Mockito.when(mockContext.getAuthentication()).thenReturn(authentication); OperationContext operationContext = - TestOperationContexts.userContextNoSearchAuthorization( - mock(EntityRegistry.class), mockAuthorizer, authentication); + TestOperationContexts.userContextNoSearchAuthorization(mockAuthorizer, authentication); Mockito.when(mockContext.getOperationContext()).thenReturn(operationContext); - Mockito.when(mockContext.getOperationContext()) - .thenReturn(Mockito.mock(OperationContext.class)); + return mockContext; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java index b4c58ca182b2f..6b0cda2957be6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -18,7 +19,9 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import 
com.linkedin.metadata.models.registry.EntityRegistry; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -31,6 +34,9 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java index 4c8ceff9c4f80..a0a9c984897e2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import com.datahub.authentication.Authentication; @@ -15,7 +16,9 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.models.registry.EntityRegistry; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -28,6 +31,9 @@ public void testGetSuccessForDomain() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 446f58bec73aa..a0430a9d75827 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -3,6 +3,7 @@ import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -20,7 +21,9 @@ import com.linkedin.entity.client.EntityClient; import 
com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.models.registry.EntityRegistry; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -33,6 +36,9 @@ public void testGetSuccessForTerm() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -140,6 +146,9 @@ public void testGetSuccessForNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 3de88333b959d..f3e27d91f39df 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -4,8 +4,10 @@ import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; +import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.metadata.Constants; import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -19,7 +21,9 @@ public void testCanManageIngestionAuthorized() throws Exception { AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", "MANAGE_INGESTION", Optional.empty()); + "urn:li:corpuser:authorized", + "MANAGE_INGESTION", + Optional.of(new EntitySpec(Constants.INGESTION_SOURCE_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -38,7 +42,9 @@ public void testCanManageIngestionUnauthorized() throws Exception { AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", "MANAGE_INGESTION", Optional.empty()); + "urn:li:corpuser:unauthorized", + "MANAGE_INGESTION", + Optional.of(new EntitySpec(Constants.INGESTION_SOURCE_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -56,7 +62,10 @@ public void testCanManageSecretsAuthorized() throws Exception { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); 
AuthorizationRequest request = - new AuthorizationRequest("urn:li:corpuser:authorized", "MANAGE_SECRETS", Optional.empty()); + new AuthorizationRequest( + "urn:li:corpuser:authorized", + "MANAGE_SECRETS", + Optional.of(new EntitySpec(Constants.SECRETS_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -75,7 +84,9 @@ public void testCanManageSecretsUnauthorized() throws Exception { AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", "MANAGE_SECRETS", Optional.empty()); + "urn:li:corpuser:unauthorized", + "MANAGE_SECRETS", + Optional.of(new EntitySpec(Constants.SECRETS_ENTITY_NAME, ""))); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java index eafdfde364947..ae14b75cc374c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java @@ -13,12 +13,12 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataHubSecretKey; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import org.mockito.Mockito; import org.testng.annotations.Test; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java index 495adb27dbd5d..effec8662a9aa 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java @@ -16,10 +16,10 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.r2.RemoteInvocationException; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.HashSet; import java.util.List; import org.mockito.Mockito; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java index 73d228d600266..0154a94c56a51 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/UpdateSecretResolverTest.java @@ -19,9 +19,9 @@ import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.secret.DataHubSecretValue; import graphql.schema.DataFetchingEnvironment; +import io.datahubproject.metadata.services.SecretService; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 05387123f9c96..bed8bf3568242 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -74,6 +74,7 @@ public class MutableTypeBatchResolverTest { @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + BatchMutableType batchMutableType = new DatasetType(mockClient); @@ -167,6 +168,7 @@ public void testGetSuccess() throws Exception { @Test public void testGetFailureUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + BatchMutableType batchMutableType = new DatasetType(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index ea0765ba9377c..5acd91be1001f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -74,6 +74,7 @@ public static void testAutoCompleteResolverSuccess( @Test public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception { ViewService viewService = initMockViewService(null, null); + // Daatasets EntityClient mockClient = initMockEntityClient( @@ -140,6 +141,7 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); + EntityClient mockClient = initMockEntityClient( Constants.DATASET_ENTITY_NAME, @@ -204,6 +206,7 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); + final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver( ImmutableList.of(new DatasetType(mockClient)), viewService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index 1f038427c9aaa..25e374c766deb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -259,7 +259,7 @@ private static QuickFilter createQuickFilter( quickFilter.setField(field); quickFilter.setValue(value); if (entityUrn != null) { - quickFilter.setEntity(UrnToEntityMapper.map(UrnUtils.getUrn(entityUrn))); + quickFilter.setEntity(UrnToEntityMapper.map(null, UrnUtils.getUrn(entityUrn))); } return quickFilter; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java index c975c7ebb0507..82a4722985896 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.assertion; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -23,8 +24,10 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.AssertionKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -87,6 +90,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java index 54b341fc1865a..ef69278df61a7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapperTest.java @@ -8,7 +8,8 @@ public class EmbedMapperTest { @Test public void testEmbedMapper() throws Exception { final String renderUrl = "https://www.google.com"; - final Embed result = EmbedMapper.map(new com.linkedin.common.Embed().setRenderUrl(renderUrl)); + final Embed result = + EmbedMapper.map(null, new com.linkedin.common.Embed().setRenderUrl(renderUrl)); Assert.assertEquals(result.getRenderUrl(), renderUrl); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java index 1e2acd0db455c..15ebc975063da 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.container; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -40,8 +41,10 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.ContainerKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -102,7 +105,7 @@ public class ContainerTypeTest { @Test public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); + EntityClient client = mock(EntityClient.class); Urn containerUrn1 = Urn.createFromString(TEST_CONTAINER_1_URN); Urn containerUrn2 = Urn.createFromString(TEST_CONTAINER_2_URN); @@ -157,8 +160,12 @@ public void testBatchLoad() throws Exception { ContainerType type = new ContainerType(client); - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + QueryContext mockContext = mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); @@ -200,7 +207,7 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityClient mockClient = mock(EntityClient.class); Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) .batchGetV2( @@ -211,8 +218,8 @@ public void testBatchLoadClientException() throws Exception { ContainerType type = new ContainerType(mockClient); // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + QueryContext context = mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(mock(Authentication.class)); assertThrows( RuntimeException.class, () -> diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java index 667d943b1095d..1bd1f96a7efbd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java @@ -35,6 +35,7 @@ import com.linkedin.metadata.key.DataPlatformInstanceKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; 
import java.util.List; @@ -142,6 +143,9 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.userContextNoSearchAuthorization(TEST_ACTOR_URN)); + List> result = type.batchLoad( ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index b28dd287e3fe4..8bfbdbe282ad6 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -50,7 +50,7 @@ public void testDatasetPropertiesMapperWithCreatedAndLastModified() { .setEntityName(Constants.DATASET_ENTITY_NAME) .setUrn(TEST_DATASET_URN) .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); + final Dataset actual = DatasetMapper.map(null, response); final Dataset expected = new Dataset(); expected.setUrn(TEST_DATASET_URN.toString()); @@ -100,7 +100,7 @@ public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { .setEntityName(Constants.DATASET_ENTITY_NAME) .setUrn(TEST_DATASET_URN) .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); + final Dataset actual = DatasetMapper.map(null, response); final Dataset expected = new Dataset(); expected.setUrn(TEST_DATASET_URN.toString()); @@ -154,7 +154,7 @@ public void testDatasetPropertiesMapperWithoutTimestampActors() { .setEntityName(Constants.DATASET_ENTITY_NAME) .setUrn(TEST_DATASET_URN) .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); + final Dataset actual = DatasetMapper.map(null, response); final Dataset expected = new Dataset(); expected.setUrn(TEST_DATASET_URN.toString()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java index 612136d1f9164..42220091f5853 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java @@ -48,7 +48,7 @@ public void testMapperFullProfile() { .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))))); final EnvelopedAspect inputAspect = new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); - final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); + final DatasetProfile actual = DatasetProfileMapper.map(null, inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); @@ -167,7 +167,7 @@ public void testMapperPartialProfile() { .setUniqueProportion(40.5f)))); final EnvelopedAspect inputAspect = new 
EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); - final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); + final DatasetProfile actual = DatasetProfileMapper.map(null, inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java index d637f873533ef..8cce03389debb 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentMapperTest.java @@ -68,7 +68,7 @@ public void testMap() throws Exception { new EnvelopedAspectMap( Collections.singletonMap(Constants.INCIDENT_INFO_ASPECT_NAME, envelopedIncidentInfo))); - Incident incident = IncidentMapper.map(entityResponse); + Incident incident = IncidentMapper.map(null, entityResponse); assertNotNull(incident); assertEquals(incident.getUrn(), "urn:li:incident:1"); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java index ad787f29e8b2a..6f06d20c97227 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/incident/IncidentTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.incident; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -25,8 +26,10 @@ import com.linkedin.incident.IncidentType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.IncidentKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -107,6 +110,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_INCIDENT_URN, TEST_INCIDENT_URN_2), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java index 5e489d7da0f7c..927d5185a71c7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/mappers/MapperUtilsTest.java @@ -1,15 +1,19 @@ package com.linkedin.datahub.graphql.types.mappers; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertThrows; import com.linkedin.common.urn.Urn; import 
com.linkedin.data.schema.annotation.PathSpecBasedSchemaAnnotationVisitor; +import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.metadata.entity.validation.ValidationUtils; import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.snapshot.Snapshot; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.List; import org.testng.annotations.BeforeTest; @@ -40,9 +44,13 @@ public void testMatchedFieldValidation() throws URISyntaxException { IllegalArgumentException.class, () -> ValidationUtils.validateUrn(entityRegistry, invalidUrn)); + QueryContext mockContext = mock(QueryContext.class); + when(mockContext.getOperationContext()) + .thenReturn(TestOperationContexts.systemContextNoSearchAuthorization(entityRegistry)); + List actualMatched = MapperUtils.getMatchedFieldEntry( - entityRegistry, + mockContext, List.of( buildSearchMatchField(urn.toString()), buildSearchMatchField(invalidUrn.toString()))); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index f88c8285e20df..b5862ef2f8fea 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -45,6 +46,7 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.notebook.EditableNotebookProperties; import com.linkedin.notebook.NotebookCell; import com.linkedin.notebook.NotebookCellArray; @@ -54,6 +56,7 @@ import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -193,6 +196,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index c8f694320d88a..dcf81dac3fbd9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.query; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; 
import com.datahub.authentication.Authentication; @@ -18,6 +19,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.query.QueryLanguage; import com.linkedin.query.QueryProperties; import com.linkedin.query.QuerySource; @@ -27,6 +29,7 @@ import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -124,6 +127,10 @@ public void testBatchLoad() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad( ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); @@ -178,6 +185,10 @@ public void testBatchLoadNullEntity() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad( ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java index f02fd38e2ca7c..557a77601b42c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.types.view; +import static org.mockito.Mockito.mock; import static org.testng.Assert.*; import com.datahub.authentication.Authentication; @@ -21,6 +22,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -32,6 +34,7 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.execution.DataFetcherResult; +import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -179,6 +182,10 @@ public void testBatchLoadValidView() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); @@ -245,6 +252,10 @@ public void 
testBatchLoadInvalidView() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getOperationContext()) + .thenReturn( + TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class))); + List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); diff --git a/datahub-upgrade/build.gradle b/datahub-upgrade/build.gradle index 8ee07db0fde30..9108f3009b4ba 100644 --- a/datahub-upgrade/build.gradle +++ b/datahub-upgrade/build.gradle @@ -14,7 +14,7 @@ ext { dependencies { implementation project(':metadata-io') implementation project(':metadata-service:factories') - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-service:configuration') implementation project(':metadata-dao-impl:kafka-producer') implementation externalDependency.charle diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java deleted file mode 100644 index 3ca397a8ce268..0000000000000 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillOwnershipTypesConfig.java +++ /dev/null @@ -1,28 +0,0 @@ -package com.linkedin.datahub.upgrade.config; - -import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; -import com.linkedin.datahub.upgrade.system.ownershiptypes.OwnershipTypes; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.search.SearchService; -import io.datahubproject.metadata.context.OperationContext; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Conditional; -import org.springframework.context.annotation.Configuration; - -@Configuration -@Conditional(SystemUpdateCondition.NonBlockingSystemUpdateCondition.class) -public class BackfillOwnershipTypesConfig { - - @Bean - public NonBlockingSystemUpgrade backfillOwnershipTypes( - final OperationContext opContext, - final EntityService entityService, - final SearchService searchService, - @Value("${systemUpdate.ownershipTypes.enabled}") final boolean enabled, - @Value("${systemUpdate.ownershipTypes.reprocess.enabled}") final boolean reprocessEnabled, - @Value("${systemUpdate.ownershipTypes.batchSize}") final Integer batchSize) { - return new OwnershipTypes( - opContext, entityService, searchService, enabled, reprocessEnabled, batchSize); - } -} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java deleted file mode 100644 index 63aacde7ef8ab..0000000000000 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypes.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.linkedin.datahub.upgrade.system.ownershiptypes; - -import com.google.common.collect.ImmutableList; -import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.NonBlockingSystemUpgrade; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.search.SearchService; -import io.datahubproject.metadata.context.OperationContext; -import java.util.List; 
- -public class OwnershipTypes implements NonBlockingSystemUpgrade { - - private final List _steps; - - public OwnershipTypes( - OperationContext opContext, - EntityService entityService, - SearchService searchService, - boolean enabled, - boolean reprocessEnabled, - Integer batchSize) { - if (enabled) { - _steps = - ImmutableList.of( - new OwnershipTypesStep( - opContext, entityService, searchService, enabled, reprocessEnabled, batchSize)); - } else { - _steps = ImmutableList.of(); - } - } - - @Override - public String id() { - return getClass().getSimpleName(); - } - - @Override - public List steps() { - return _steps; - } -} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java deleted file mode 100644 index 4c55f4ddcb31d..0000000000000 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/ownershiptypes/OwnershipTypesStep.java +++ /dev/null @@ -1,276 +0,0 @@ -package com.linkedin.datahub.upgrade.system.ownershiptypes; - -import static com.linkedin.metadata.Constants.DATA_HUB_UPGRADE_RESULT_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.linkedin.common.AuditStamp; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.upgrade.UpgradeContext; -import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.UpgradeStepResult; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; -import com.linkedin.entity.Aspect; -import com.linkedin.events.metadata.ChangeType; -import com.linkedin.metadata.Constants; -import com.linkedin.metadata.aspect.batch.AspectsBatch; -import com.linkedin.metadata.boot.BootstrapStep; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; -import com.linkedin.metadata.query.filter.Condition; -import com.linkedin.metadata.query.filter.ConjunctiveCriterion; -import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; -import com.linkedin.metadata.query.filter.CriterionArray; -import com.linkedin.metadata.query.filter.Filter; -import com.linkedin.metadata.search.ScrollResult; -import com.linkedin.metadata.search.SearchEntity; -import com.linkedin.metadata.search.SearchEntityArray; -import com.linkedin.metadata.search.SearchService; -import com.linkedin.metadata.utils.AuditStampUtils; -import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.mxe.MetadataChangeProposal; -import com.linkedin.mxe.SystemMetadata; -import io.datahubproject.metadata.context.OperationContext; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class OwnershipTypesStep implements UpgradeStep { - - private static final String UPGRADE_ID = OwnershipTypes.class.getSimpleName(); - private static final Urn UPGRADE_ID_URN = BootstrapStep.getUpgradeUrn(UPGRADE_ID); - - private static final Set ENTITY_TYPES_TO_MIGRATE = - ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - 
Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME, - Constants.ML_PRIMARY_KEY_ENTITY_NAME, - Constants.GLOSSARY_TERM_ENTITY_NAME, - Constants.GLOSSARY_NODE_ENTITY_NAME, - Constants.TAG_ENTITY_NAME, - Constants.ROLE_ENTITY_NAME, - Constants.CORP_GROUP_ENTITY_NAME, - Constants.CORP_USER_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME, - Constants.DOMAIN_ENTITY_NAME, - Constants.DATA_PRODUCT_ENTITY_NAME, - Constants.NOTEBOOK_ENTITY_NAME); - - private final OperationContext opContext; - private final EntityService entityService; - private final SearchService searchService; - private final boolean enabled; - private final boolean reprocessEnabled; - private final Integer batchSize; - - public OwnershipTypesStep( - OperationContext opContext, - EntityService entityService, - SearchService searchService, - boolean enabled, - boolean reprocessEnabled, - Integer batchSize) { - this.opContext = opContext; - this.entityService = entityService; - this.searchService = searchService; - this.enabled = enabled; - this.reprocessEnabled = reprocessEnabled; - this.batchSize = batchSize; - } - - @Override - public Function executable() { - return (context) -> { - final AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp(); - - String scrollId = null; - for (String entityType : ENTITY_TYPES_TO_MIGRATE) { - int migratedCount = 0; - do { - log.info( - String.format( - "Upgrading batch %s-%s of browse paths for entity type %s", - migratedCount, migratedCount + batchSize, entityType)); - scrollId = ownershipTypes(entityType, auditStamp, scrollId); - migratedCount += batchSize; - } while (scrollId != null); - } - - BootstrapStep.setUpgradeResult(UPGRADE_ID_URN, entityService); - - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } - - private String ownershipTypes(String entityType, AuditStamp auditStamp, String scrollId) { - - final Filter filter; - - if (reprocessEnabled) { - filter = backfillDefaultOwnershipTypesFilter(); - } else { - filter = backfillOwnershipTypesFilter(); - } - - final ScrollResult scrollResult = - searchService.scrollAcrossEntities( - opContext.withSearchFlags( - flags -> - flags - .setFulltext(true) - .setSkipCache(true) - .setSkipHighlighting(true) - .setSkipAggregates(true)), - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - null, - batchSize); - - if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { - return null; - } - - try { - ingestOwnershipTypes(scrollResult.getEntities(), auditStamp); - } catch (Exception e) { - // don't stop the whole step because of one bad urn or one bad ingestion - log.error( - String.format( - "Error ingesting ownership aspect for urn %s", - scrollResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - e); - } - - return scrollResult.getScrollId(); - } - - private Filter backfillOwnershipTypesFilter() { - // Condition: has `owners` AND does NOT have `ownershipTypes` - Criterion hasOwners = new Criterion(); - hasOwners.setCondition(Condition.EXISTS); - hasOwners.setField("owners"); - // Excludes entities with ownershipTypes - Criterion missingOwnershipTypes = new Criterion(); - missingOwnershipTypes.setCondition(Condition.IS_NULL); - missingOwnershipTypes.setField("ownershipTypes"); - - CriterionArray criterionArray = new CriterionArray(); - criterionArray.add(hasOwners); - criterionArray.add(missingOwnershipTypes); - - ConjunctiveCriterion 
conjunctiveCriterion = new ConjunctiveCriterion(); - conjunctiveCriterion.setAnd(criterionArray); - - ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); - conjunctiveCriterionArray.add(conjunctiveCriterion); - - Filter filter = new Filter(); - filter.setOr(conjunctiveCriterionArray); - return filter; - } - - private Filter backfillDefaultOwnershipTypesFilter() { - // Condition: has `owners` - Criterion hasOwners = new Criterion(); - hasOwners.setCondition(Condition.EXISTS); - hasOwners.setField("owners"); - - CriterionArray criterionArray = new CriterionArray(); - criterionArray.add(hasOwners); - - ConjunctiveCriterion conjunctiveCriterion = new ConjunctiveCriterion(); - conjunctiveCriterion.setAnd(criterionArray); - - ConjunctiveCriterionArray conjunctiveCriterionArray = new ConjunctiveCriterionArray(); - conjunctiveCriterionArray.add(conjunctiveCriterion); - - Filter filter = new Filter(); - filter.setOr(conjunctiveCriterionArray); - return filter; - } - - private void ingestOwnershipTypes(SearchEntityArray searchBatch, AuditStamp auditStamp) - throws Exception { - Map> existing = - entityService.getLatestAspectObjects( - searchBatch.stream().map(SearchEntity::getEntity).collect(Collectors.toSet()), - Set.of(Constants.OWNERSHIP_ASPECT_NAME)); - - List mcps = - existing.entrySet().stream() - .filter(result -> result.getValue().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) - .map( - result -> { - MetadataChangeProposal proposal = new MetadataChangeProposal(); - proposal.setEntityUrn(result.getKey()); - proposal.setEntityType(result.getKey().getEntityType()); - proposal.setAspectName(Constants.OWNERSHIP_ASPECT_NAME); - proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata( - new SystemMetadata() - .setRunId(DEFAULT_RUN_ID) - .setLastObserved(System.currentTimeMillis())); - proposal.setAspect( - GenericRecordUtils.serializeAspect( - result.getValue().get(Constants.OWNERSHIP_ASPECT_NAME))); - return proposal; - }) - .collect(Collectors.toList()); - - log.debug(String.format("Reingesting ownership for %s urns", mcps.size())); - AspectsBatch batch = AspectsBatchImpl.builder().mcps(mcps, auditStamp, entityService).build(); - - entityService.ingestProposal(batch, false); - } - - @Override - public String id() { - return UPGRADE_ID; - } - - /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum - * retries. - */ - @Override - public boolean isOptional() { - return true; - } - - @Override - /** - * Returns whether the upgrade should be skipped. Uses previous run history or the environment - * variables to determine whether to skip. - */ - public boolean skip(UpgradeContext context) { - if (reprocessEnabled && enabled) { - return false; - } - - boolean previouslyRun = - entityService.exists(UPGRADE_ID_URN, DATA_HUB_UPGRADE_RESULT_ASPECT_NAME, true); - - if (previouslyRun) { - log.info("{} was already run. 
Skipping.", id()); - } - return (previouslyRun || !enabled); - } -} diff --git a/datahub-web-react/src/app/entity/shared/tabs/Entity/DataJobFlowTab.tsx b/datahub-web-react/src/app/entity/shared/tabs/Entity/DataJobFlowTab.tsx index f842f1adb9c90..1371a0aeb499c 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Entity/DataJobFlowTab.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Entity/DataJobFlowTab.tsx @@ -10,5 +10,5 @@ export const DataJobFlowTab = () => { const dataFlow = dataJob?.dataFlow; const entityRegistry = useEntityRegistry(); const title = `Part of ${entityRegistry.getEntityName(EntityType.DataFlow)}`; - return ; + return ; }; diff --git a/docker/profiles/docker-compose.actions.yml b/docker/profiles/docker-compose.actions.yml index 99531d5057539..45c1d928443f8 100644 --- a/docker/profiles/docker-compose.actions.yml +++ b/docker/profiles/docker-compose.actions.yml @@ -2,7 +2,10 @@ x-datahub-actions-service: &datahub-actions-service hostname: actions image: ${DATAHUB_ACTIONS_IMAGE:-${DATAHUB_ACTIONS_REPO:-acryldata}/datahub-actions}:${ACTIONS_VERSION:-v0.0.14} - env_file: datahub-actions/env/docker.env + env_file: + - datahub-actions/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-actions/env/docker.env} + - ${DATAHUB_LOCAL_ACTIONS_ENV:-datahub-actions/env/docker.env} environment: ACTIONS_EXTRA_PACKAGES: ${ACTIONS_EXTRA_PACKAGES:-} ACTIONS_CONFIG: ${ACTIONS_CONFIG:-} diff --git a/docker/profiles/docker-compose.frontend.yml b/docker/profiles/docker-compose.frontend.yml index d77fe57319f67..4b2e7417fa61c 100644 --- a/docker/profiles/docker-compose.frontend.yml +++ b/docker/profiles/docker-compose.frontend.yml @@ -4,7 +4,10 @@ x-datahub-frontend-service: &datahub-frontend-service image: ${DATAHUB_FRONTEND_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-frontend-react}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_FRONTEND_PORT:-9002}:9002 - env_file: datahub-frontend/env/docker.env + env_file: + - datahub-frontend/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-frontend/env/docker.env} + - ${DATAHUB_LOCAL_FRONTEND_ENV:-datahub-frontend/env/docker.env} environment: &datahub-frontend-service-env KAFKA_BOOTSTRAP_SERVER: broker:29092 volumes: diff --git a/docker/profiles/docker-compose.gms.yml b/docker/profiles/docker-compose.gms.yml index 52fb3c1e9abe0..961bd4464af95 100644 --- a/docker/profiles/docker-compose.gms.yml +++ b/docker/profiles/docker-compose.gms.yml @@ -58,7 +58,10 @@ x-datahub-system-update-service: &datahub-system-update-service command: - -u - SystemUpdate - env_file: datahub-upgrade/env/docker.env + env_file: + - datahub-upgrade/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-upgrade/env/docker.env} + - ${DATAHUB_LOCAL_SYS_UPDATE_ENV:-datahub-upgrade/env/docker.env} environment: &datahub-system-update-env <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] SCHEMA_REGISTRY_SYSTEM_UPDATE: ${SCHEMA_REGISTRY_SYSTEM_UPDATE:-true} @@ -90,7 +93,10 @@ x-datahub-gms-service: &datahub-gms-service image: ${DATAHUB_GMS_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-gms}:${DATAHUB_VERSION:-head} ports: - ${DATAHUB_MAPPED_GMS_PORT:-8080}:8080 - env_file: datahub-gms/env/docker.env + env_file: + - datahub-gms/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-gms/env/docker.env} + - ${DATAHUB_LOCAL_GMS_ENV:-datahub-gms/env/docker.env} environment: &datahub-gms-env <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] healthcheck: 
@@ -113,7 +119,6 @@ x-datahub-gms-service-dev: &datahub-gms-service-dev environment: &datahub-gms-dev-env <<: [*datahub-dev-telemetry-env, *datahub-gms-env] SKIP_ELASTICSEARCH_CHECK: false - METADATA_SERVICE_AUTH_ENABLED: false JAVA_TOOL_OPTIONS: '-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5001' BOOTSTRAP_SYSTEM_UPDATE_WAIT_FOR_SYSTEM_UPDATE: false SEARCH_SERVICE_ENABLE_CACHE: false @@ -135,7 +140,10 @@ x-datahub-mae-consumer-service: &datahub-mae-consumer-service image: ${DATAHUB_MAE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mae-consumer}:${DATAHUB_VERSION:-head} ports: - 9091:9091 - env_file: datahub-mae-consumer/env/docker.env + env_file: + - datahub-mae-consumer/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-mae-consumer/env/docker.env} + - ${DATAHUB_LOCAL_MAE_ENV:-datahub-mae-consumer/env/docker.env} environment: &datahub-mae-consumer-env <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *kafka-env] @@ -158,7 +166,10 @@ x-datahub-mce-consumer-service: &datahub-mce-consumer-service image: ${DATAHUB_MCE_CONSUMER_IMAGE:-${DATAHUB_REPO:-linkedin}/datahub-mce-consumer}:${DATAHUB_VERSION:-head} ports: - 9090:9090 - env_file: datahub-mce-consumer/env/docker.env + env_file: + - datahub-mce-consumer/env/docker.env + - ${DATAHUB_LOCAL_COMMON_ENV:-datahub-mce-consumer/env/docker.env} + - ${DATAHUB_LOCAL_MCE_ENV:-datahub-mce-consumer/env/docker.env} environment: &datahub-mce-consumer-env <<: [*primary-datastore-mysql-env, *graph-datastore-search-env, *search-datastore-env, *datahub-quickstart-telemetry-env, *kafka-env] diff --git a/docs/actions/events/metadata-change-log-event.md b/docs/actions/events/metadata-change-log-event.md index cf2069cf2977b..56f41ddb2ed6c 100644 --- a/docs/actions/events/metadata-change-log-event.md +++ b/docs/actions/events/metadata-change-log-event.md @@ -19,7 +19,7 @@ The fields include | entityUrn | String | The unique identifier for the Entity being changed. For example, a Dataset's urn. | False | | entityType | String | The type of the entity being changed. Supported values include dataset, chart, dashboard, dataFlow (Pipeline), dataJob (Task), domain, tag, glossaryTerm, corpGroup, & corpUser. | False | | entityKeyAspect | Object | The key struct of the entity that was changed. Only present if the Metadata Change Proposal contained the raw key struct. | True | -| changeType | String | The change type. UPSERT or DELETE are currently supported. | False | +| changeType | String | The change type. UPSERT, DELETE, CREATE, RESTATE are currently supported. | False | | aspectName | String | The entity aspect which was changed. | False | | aspect | Object | The new aspect value. Null if the aspect was deleted. | True | | aspect.contentType | String | The serialization type of the aspect itself. The only supported value is `application/json`. | False | @@ -31,7 +31,7 @@ The fields include | previousSystemMetadata | Object | The previous system metadata. This includes the the ingestion run-id, model registry and more. For the full structure, see https://github.com/datahub-project/datahub/blob/master/metadata-models/src/main/pegasus/com/linkedin/mxe/SystemMetadata.pdl | True | | created | Object | Audit stamp about who triggered the Metadata Change and when. | False | | created.time | Number | The timestamp in milliseconds when the aspect change occurred. | False | -| created.actor | String | The URN of the actor (e.g. corpuser) that triggered the change. 
+| created.actor | String | The URN of the actor (e.g. corpuser) that triggered the change. ### Sample Events diff --git a/docs/advanced/mcp-mcl.md b/docs/advanced/mcp-mcl.md index 5a9052c19155b..235c9a85ec8da 100644 --- a/docs/advanced/mcp-mcl.md +++ b/docs/advanced/mcp-mcl.md @@ -87,7 +87,7 @@ Each proposal comprises of the following: - DELETE: Delete - PATCH: Patch the aspect instead of doing a full replace - Only UPSERT is supported as of now. + Only UPSERT, CREATE, DELETE, PATCH are supported as of now. 5. aspectName diff --git a/docs/api/openapi/openapi-usage-guide.md b/docs/api/openapi/openapi-usage-guide.md index be8961a08edf7..f33c20c91dacb 100644 --- a/docs/api/openapi/openapi-usage-guide.md +++ b/docs/api/openapi/openapi-usage-guide.md @@ -48,13 +48,16 @@ Even lower-level API-s that allow you to write metadata events into the DataHub #### Entities (/entities) endpoint -##### POST +##### POST (UPSERT) + +A post without any additional URL parameters performs an UPSERT of entity's aspects. The entity will be +created if it doesn't exist or updated if it does. ```shell curl --location --request POST 'localhost:8080/openapi/entities/v1/' \ --header 'Content-Type: application/json' \ --header 'Accept: application/json' \ ---header 'Authorization: Bearer eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6ImRhdGFodWIiLCJ0eXBlIjoiUEVSU09OQUwiLCJ2ZXJzaW9uIjoiMSIsImV4cCI6MTY1MDY2MDY1NSwianRpIjoiM2E4ZDY3ZTItOTM5Yi00NTY3LWE0MjYtZDdlMDA1ZGU3NjJjIiwic3ViIjoiZGF0YWh1YiIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9.pp_vW2u1tiiTT7U0nDF2EQdcayOMB8jatiOA8Je4JJA' \ +--header 'Authorization: Bearer ' \ --data-raw '[ { "aspect": { @@ -111,13 +114,34 @@ curl --location --request POST 'localhost:8080/openapi/entities/v1/' \ } ]' ``` +##### POST (CREATE) + +The second POST example will write the update ONLY if the entity doesn't exist. If the entity does exist the +command will return an error instead of overwriting the entity. + +In this example we've added an additional URL parameter `createEntityIfNotExists=true` + +```shell +curl --location --request POST 'localhost:8080/openapi/entities/v1/?createEntityIfNotExists=true' \ +--header 'Content-Type: application/json' \ +--header 'Accept: application/json' \ +--header 'Authorization: Bearer ' \ +--data-raw '' +``` + +If the entity doesn't exist the response will be identical to the previous example. In the case where the entity already exists, +the following error will occur. 
+ +> 422 ValidationExceptionCollection{EntityAspect:(urn:li:dataset:(urn:li:dataPlatform:platform,testSchemaIngest,PROD),schemaMetadata) Exceptions: [com.linkedin.metadata.aspect.plugins.validation.AspectValidationException: Cannot perform CREATE if not exists since the entity key already exists.]} + + ##### GET ```shell curl --location --request GET 'localhost:8080/openapi/entities/v1/latest?urns=urn:li:dataset:(urn:li:dataPlatform:platform,testSchemaIngest,PROD)&aspectNames=schemaMetadata' \ --header 'Accept: application/json' \ ---header 'Authorization: Bearer eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6ImRhdGFodWIiLCJ0eXBlIjoiUEVSU09OQUwiLCJ2ZXJzaW9uIjoiMSIsImV4cCI6MTY1MDY2MDY1NSwianRpIjoiM2E4ZDY3ZTItOTM5Yi00NTY3LWE0MjYtZDdlMDA1ZGU3NjJjIiwic3ViIjoiZGF0YWh1YiIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9.pp_vW2u1tiiTT7U0nDF2EQdcayOMB8jatiOA8Je4JJA' +--header 'Authorization: Bearer ' ``` ##### DELETE @@ -125,7 +149,7 @@ curl --location --request GET 'localhost:8080/openapi/entities/v1/latest?urns=ur ```shell curl --location --request DELETE 'localhost:8080/openapi/entities/v1/?urns=urn:li:dataset:(urn:li:dataPlatform:platform,testSchemaIngest,PROD)&soft=true' \ --header 'Accept: application/json' \ ---header 'Authorization: Bearer eyJhbGciOiJIUzI1NiJ9.eyJhY3RvclR5cGUiOiJVU0VSIiwiYWN0b3JJZCI6ImRhdGFodWIiLCJ0eXBlIjoiUEVSU09OQUwiLCJ2ZXJzaW9uIjoiMSIsImV4cCI6MTY1MDY2MDY1NSwianRpIjoiM2E4ZDY3ZTItOTM5Yi00NTY3LWE0MjYtZDdlMDA1ZGU3NjJjIiwic3ViIjoiZGF0YWh1YiIsImlzcyI6ImRhdGFodWItbWV0YWRhdGEtc2VydmljZSJ9.pp_vW2u1tiiTT7U0nDF2EQdcayOMB8jatiOA8Je4JJA' +--header 'Authorization: Bearer ' ``` #### Postman Collection diff --git a/docs/authorization/policies.md b/docs/authorization/policies.md index 4d343f0e95bb5..759489f291a94 100644 --- a/docs/authorization/policies.md +++ b/docs/authorization/policies.md @@ -94,27 +94,29 @@ We currently support the following: **Common metadata privileges** to view & modify any entity within DataHub. -| Common Privileges | Description | -|------------------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------| -| View Entity Page | Allow actor to access the entity page for the resource in the UI. If not granted, it will redirect them to an unauthorized page. | -| Edit Tags | Allow actor to add and remove tags to an asset. | -| Edit Glossary Terms | Allow actor to add and remove glossary terms to an asset. | -| Edit Owners | Allow actor to add and remove owners of an entity. | -| Edit Description | Allow actor to edit the description (documentation) of an entity. | -| Edit Links | Allow actor to edit links associated with an entity. | -| Edit Status | Allow actor to edit the status of an entity (soft deleted or not). | -| Edit Domain | Allow actor to edit the Domain of an entity. | -| Edit Deprecation | Allow actor to edit the Deprecation status of an entity. | -| Edit Assertions | Allow actor to add and remove assertions from an entity. | -| Edit Incidents | Allow actor to raise and resolve incidents for an entity. | -| Edit All | Allow actor to edit any information about an entity. Super user privileges. Controls the ability to ingest using API when REST API Authorization is enabled. | -| Get Timeline API[^1] | Allow actor to get the timeline of an entity via API. | -| Get Entity API[^1] | Allow actor to get an entity via API. | -| Get Timeseries Aspect API[^1] | Allow actor to get a timeseries aspect via API. 
| -| Get Aspect/Entity Count APIs[^1] | Allow actor to get aspect and entity counts via API. | -| Search API[^1] | Allow actor to search for entities via API. | -| Produce Platform Event API[^1] | Allow actor to ingest a platform event via API. | -| Explain ElasticSearch Query API[^1] | Allow actor to explain an ElasticSearch query. | +| Common Privileges | Description | +|-------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| View Entity Page | Allow actor to access the entity page for the resource in the UI. If not granted, it will redirect them to an unauthorized page. Additionally if the actor does not have this view privilege, the entity will be removed from search results. | +| Edit Tags | Allow actor to add and remove tags to an asset. | +| Edit Glossary Terms | Allow actor to add and remove glossary terms to an asset. | +| Edit Owners | Allow actor to add and remove owners of an entity. | +| Edit Description | Allow actor to edit the description (documentation) of an entity. | +| Edit Links | Allow actor to edit links associated with an entity. | +| Edit Status | Allow actor to edit the status of an entity (soft deleted or not). | +| Edit Domain | Allow actor to edit the Domain of an entity. | +| Edit Deprecation | Allow actor to edit the Deprecation status of an entity. | +| Edit Assertions | Allow actor to add and remove assertions from an entity. | +| Edit Incidents | Allow actor to raise and resolve incidents for an entity. | +| Edit All | Allow actor to edit any information about an entity. Super user privileges. Controls the ability to ingest using API when REST API Authorization is enabled. | +| Get Timeline API[^1] | Allow actor to get the timeline of an entity via API. | +| Get Entity API[^1] | Allow actor to get an entity via API. | +| Get Timeseries Aspect API[^1] | Allow actor to get a timeseries aspect via API. | +| Get Aspect/Entity Count APIs[^1] | Allow actor to get aspect and entity counts via API. | +| Search API[^1] | Allow actor to search for entities via API. | +| Produce Platform Event API[^1] | Allow actor to ingest a platform event via API. | +| Explain ElasticSearch Query API[^1] | Allow actor to explain an ElasticSearch query. | +| Create Entity | Allow creation of the entity if it doesn't already exist. | +| Entity Exists | Allow checking the existence of the entity without any additional access to the entity's data. 
| [^1]: Only active if REST_API_AUTHORIZATION_ENABLED is true diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/AspectRetriever.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/AspectRetriever.java index 2ef22483da1ca..ee9abe0356753 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/AspectRetriever.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/AspectRetriever.java @@ -5,10 +5,12 @@ import com.linkedin.entity.Aspect; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.util.Pair; import java.net.URISyntaxException; import java.util.Collections; import java.util.Map; import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -33,6 +35,25 @@ default Aspect getLatestAspectObject(@Nonnull final Urn urn, @Nonnull final Stri Map> getLatestAspectObjects(Set urns, Set aspectNames) throws RemoteInvocationException, URISyntaxException; + @Nonnull + default Map entityExists(Set urns) { + Set keyAspectNames = + urns.stream() + .map(Urn::getEntityType) + .distinct() + .map(entityType -> getEntityRegistry().getEntitySpec(entityType).getKeyAspectName()) + .collect(Collectors.toSet()); + + try { + Map> latest = getLatestAspectObjects(urns, keyAspectNames); + return urns.stream() + .map(urn -> Pair.of(urn, latest.containsKey(urn))) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + } catch (RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException(e); + } + } + @Nonnull EntityRegistry getEntityRegistry(); } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java index ddedc96b38577..d79848e40bbcf 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/AspectsBatch.java @@ -31,7 +31,7 @@ public interface AspectsBatch { * * @return batch items */ - default Collection getMCPItems() { + default List getMCPItems() { return getItems().stream() .filter(item -> item instanceof MCPItem) .map(item -> (MCPItem) item) diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPItem.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPItem.java index 8c25f3c4f44de..96b2752516e60 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPItem.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/batch/MCPItem.java @@ -1,14 +1,19 @@ package com.linkedin.metadata.aspect.batch; +import com.google.common.collect.ImmutableSet; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.patch.template.AspectTemplateEngine; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.mxe.MetadataChangeProposal; +import java.util.Set; import javax.annotation.Nullable; /** Represents a proposal to write to the primary data store which may be represented by an MCP */ public interface MCPItem extends BatchItem { + Set CHANGE_TYPES = + ImmutableSet.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.CREATE_ENTITY); + @Nullable MetadataChangeProposal getMetadataChangeProposal(); @@ -27,7 +32,7 @@ static boolean isValidChangeType(ChangeType changeType, AspectSpec aspectSpec) { if (ChangeType.PATCH.equals(changeType)) { return 
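The new `AspectRetriever#entityExists` default method above resolves the key aspect for each urn and reports presence. A minimal usage sketch follows; it is not part of this patch, and it assumes some concrete `AspectRetriever` implementation is already wired up elsewhere (the class and urn below are illustrative only):

```java
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.metadata.aspect.AspectRetriever;
import java.util.Map;
import java.util.Set;

public class EntityExistsExample {
  // Hypothetical field; in practice the retriever comes from the entity-service wiring.
  static AspectRetriever aspectRetriever;

  public static void main(String[] args) {
    Set<Urn> urns =
        Set.of(UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)"));
    // The default implementation looks up each urn's key aspect; a present key aspect
    // means the entity exists.
    Map<Urn, Boolean> exists = aspectRetriever.entityExists(urns);
    exists.forEach((urn, present) -> System.out.println(urn + " exists: " + present));
  }
}
```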
supportsPatch(aspectSpec); } else { - return ChangeType.UPSERT.equals(changeType); + return CHANGE_TYPES.contains(changeType); } } } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java deleted file mode 100644 index 45e9280199330..0000000000000 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMap.java +++ /dev/null @@ -1,111 +0,0 @@ -package com.linkedin.metadata.aspect.hooks; - -import static com.linkedin.metadata.Constants.DEFAULT_OWNERSHIP_TYPE_URN; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - -import com.linkedin.common.Owner; -import com.linkedin.common.Ownership; -import com.linkedin.common.UrnArray; -import com.linkedin.common.UrnArrayMap; -import com.linkedin.common.urn.Urn; -import com.linkedin.data.template.RecordTemplate; -import com.linkedin.metadata.aspect.AspectRetriever; -import com.linkedin.metadata.aspect.batch.ChangeMCP; -import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; -import com.linkedin.metadata.aspect.plugins.hooks.MutationHook; -import com.linkedin.util.Pair; -import java.util.Collection; -import java.util.Collections; -import java.util.LinkedList; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -/** Hook to populate the ownerType map within the ownership aspect */ -public class OwnerTypeMap extends MutationHook { - public OwnerTypeMap(AspectPluginConfig aspectPluginConfig) { - super(aspectPluginConfig); - } - - @Override - protected Stream> writeMutation( - @Nonnull Collection changeMCPS, @Nonnull AspectRetriever aspectRetriever) { - - List> results = new LinkedList<>(); - - for (ChangeMCP item : changeMCPS) { - if (OWNERSHIP_ASPECT_NAME.equals(item.getAspectName()) && item.getRecordTemplate() != null) { - final Map> oldTypeOwner = - groupByOwnerType(item.getPreviousRecordTemplate()); - final Map> newTypeOwner = groupByOwnerType(item.getRecordTemplate()); - - Set removedTypes = - oldTypeOwner.keySet().stream() - .filter(typeUrn -> !newTypeOwner.containsKey(typeUrn)) - .collect(Collectors.toSet()); - - Set updatedTypes = newTypeOwner.keySet(); - - Map typeOwners = - Stream.concat(removedTypes.stream(), updatedTypes.stream()) - .map( - typeUrn -> { - final String typeFieldName = encodeFieldName(typeUrn.toString()); - if (removedTypes.contains(typeUrn)) { - // removed - return Pair.of(typeFieldName, new UrnArray()); - } - // updated - return Pair.of( - typeFieldName, - new UrnArray( - newTypeOwner.getOrDefault(typeUrn, Collections.emptySet()).stream() - .map(Owner::getOwner) - .collect(Collectors.toSet()))); - }) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)); - - if (!typeOwners.isEmpty()) { - item.getAspect(Ownership.class).setOwnerTypes(new UrnArrayMap(typeOwners)); - results.add(Pair.of(item, true)); - continue; - } - } - - // no op - results.add(Pair.of(item, false)); - } - - return results.stream(); - } - - private static Map> groupByOwnerType( - @Nullable RecordTemplate ownershipRecordTemplate) { - if (ownershipRecordTemplate != null) { - Ownership ownership = new Ownership(ownershipRecordTemplate.data()); - if (!ownership.getOwners().isEmpty()) { - return ownership.getOwners().stream() - .collect( - Collectors.groupingBy( - owner -> - owner.getTypeUrn() != null - ? 
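With the `MCPItem` change above, `isValidChangeType` accepts UPSERT, CREATE, and CREATE_ENTITY for any aspect, while PATCH remains gated on patch-template support. A small sketch of the resulting behavior, assuming an `AspectSpec` obtained from the entity registry (not part of this patch):

```java
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.aspect.batch.MCPItem;
import com.linkedin.metadata.models.AspectSpec;

public class ChangeTypeCheckExample {
  static void describe(AspectSpec aspectSpec) {
    // UPSERT, CREATE and CREATE_ENTITY are always accepted after this patch.
    System.out.println(MCPItem.isValidChangeType(ChangeType.CREATE, aspectSpec));        // true
    System.out.println(MCPItem.isValidChangeType(ChangeType.CREATE_ENTITY, aspectSpec));  // true
    // PATCH is accepted only when the aspect spec declares a patch template.
    System.out.println(MCPItem.isValidChangeType(ChangeType.PATCH, aspectSpec));
    // Other change types (e.g. DELETE) are still rejected by this particular check.
    System.out.println(MCPItem.isValidChangeType(ChangeType.DELETE, aspectSpec));         // false
  }
}
```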
owner.getTypeUrn() - : DEFAULT_OWNERSHIP_TYPE_URN, - Collectors.toSet())); - } - } - return Collections.emptyMap(); - } - - public static String encodeFieldName(String value) { - return value.replaceAll("[.]", "%2E"); - } - - public static String decodeFieldName(String value) { - return value.replaceAll("%2E", "."); - } -} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java index 6937070a684e2..564fbf32e809f 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/PluginSpec.java @@ -62,7 +62,10 @@ protected boolean isEntityAspectSupported( protected boolean isAspectSupported(@Nonnull String aspectName) { return getConfig().getSupportedEntityAspectNames().stream() - .anyMatch(supported -> supported.getAspectName().equals(aspectName)); + .anyMatch( + supported -> + ENTITY_WILDCARD.equals(supported.getAspectName()) + || supported.getAspectName().equals(aspectName)); } protected boolean isChangeTypeSupported(@Nullable ChangeType changeType) { diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/AspectPluginConfig.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/AspectPluginConfig.java index 00ebcf6b46491..8d9a8d6fc6a69 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/AspectPluginConfig.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/plugins/config/AspectPluginConfig.java @@ -26,6 +26,8 @@ public class AspectPluginConfig { @AllArgsConstructor @Builder public static class EntityAspectName { + public static final EntityAspectName ALL = new EntityAspectName("*", "*"); + @Nonnull private String entityName; @Nonnull private String aspectName; } diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/CreateIfNotExistsValidator.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/CreateIfNotExistsValidator.java new file mode 100644 index 0000000000000..00d11f8acd0d5 --- /dev/null +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/CreateIfNotExistsValidator.java @@ -0,0 +1,75 @@ +package com.linkedin.metadata.aspect.validation; + +import com.linkedin.common.urn.Urn; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.aspect.ReadItem; +import com.linkedin.metadata.aspect.batch.BatchItem; +import com.linkedin.metadata.aspect.batch.ChangeMCP; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.validation.AspectPayloadValidator; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.plugins.validation.ValidationExceptionCollection; +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import javax.annotation.Nonnull; + +/** Common implementation of checking for create if not exists semantics. 
*/ +public class CreateIfNotExistsValidator extends AspectPayloadValidator { + + public CreateIfNotExistsValidator(AspectPluginConfig aspectPluginConfig) { + super(aspectPluginConfig); + } + + @Override + protected Stream validatePreCommitAspects( + @Nonnull Collection changeMCPs, AspectRetriever aspectRetriever) { + + ValidationExceptionCollection exceptions = ValidationExceptionCollection.newCollection(); + + // This logic relies on the fact that key aspects are either explicitly created (in the batch) + // or the key aspect is auto generated as part of the default aspects and included + // within a batch. + // Meaning the presence of the key aspect indicates that the entity doesn't exist and CREATE + // should be allowed + Map> entityKeyMap = + changeMCPs.stream() + .filter(item -> item.getEntitySpec().getKeyAspectName().equals(item.getAspectName())) + .collect(Collectors.groupingBy(ReadItem::getUrn, Collectors.toSet())); + + for (ChangeMCP createEntityItem : + changeMCPs.stream() + .filter(item -> ChangeType.CREATE_ENTITY.equals(item.getChangeType())) + .collect(Collectors.toSet())) { + // if the key aspect is missing in the batch, the entity exists and CREATE_ENTITY should be + // denied + if (!entityKeyMap.containsKey(createEntityItem.getUrn())) { + exceptions.addException( + createEntityItem, + "Cannot perform CREATE_ENTITY if not exists since the entity key already exists."); + } + } + + for (ChangeMCP createItem : + changeMCPs.stream() + .filter(item -> ChangeType.CREATE.equals(item.getChangeType())) + .collect(Collectors.toSet())) { + // if a CREATE item has a previous value, should be denied + if (createItem.getPreviousRecordTemplate() != null) { + exceptions.addException( + createItem, "Cannot perform CREATE since the aspect already exists."); + } + } + + return exceptions.streamAllExceptions(); + } + + @Override + protected Stream validateProposedAspects( + @Nonnull Collection mcpItems, @Nonnull AspectRetriever aspectRetriever) { + return Stream.empty(); + } +} diff --git a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java index 8a8c13bf18e85..4cdcb7e101811 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/aspect/validation/StructuredPropertiesValidator.java @@ -46,6 +46,8 @@ /** A Validator for StructuredProperties Aspect that is attached to entities like Datasets, etc. 
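The validator above enforces create-only semantics at pre-commit time: a CREATE proposal is rejected when a previous version of the aspect already exists, and CREATE_ENTITY is rejected when the entity's key aspect is not part of the same batch (meaning the entity already exists). A hedged sketch of a proposal that opts into these semantics; the urn, aspect, and class name are illustrative and not taken from this patch:

```java
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.dataset.DatasetProperties;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.utils.GenericRecordUtils;
import com.linkedin.mxe.MetadataChangeProposal;

public class CreateProposalExample {
  static MetadataChangeProposal buildCreateProposal() {
    MetadataChangeProposal proposal = new MetadataChangeProposal();
    proposal.setEntityUrn(
        UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,fct_users_created,PROD)"));
    proposal.setEntityType("dataset");
    proposal.setAspectName("datasetProperties");
    // CREATE: rejected by CreateIfNotExistsValidator if this aspect already exists.
    // CREATE_ENTITY would additionally require the entity's key aspect to be new.
    proposal.setChangeType(ChangeType.CREATE);
    proposal.setAspect(
        GenericRecordUtils.serializeAspect(new DatasetProperties().setDescription("created once")));
    return proposal;
  }
}
```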
*/ @Slf4j public class StructuredPropertiesValidator extends AspectPayloadValidator { + private static final Set CHANGE_TYPES = + ImmutableSet.of(ChangeType.CREATE, ChangeType.CREATE_ENTITY, ChangeType.UPSERT); private static final Set VALID_VALUE_STORED_AS_STRING = new HashSet<>( @@ -81,7 +83,7 @@ protected Stream validateProposedAspects( @Nonnull Collection mcpItems, @Nonnull AspectRetriever aspectRetriever) { return validateProposedUpserts( mcpItems.stream() - .filter(i -> ChangeType.UPSERT.equals(i.getChangeType())) + .filter(i -> CHANGE_TYPES.contains(i.getChangeType())) .collect(Collectors.toList()), aspectRetriever); } diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java deleted file mode 100644 index 895744bb182eb..0000000000000 --- a/entity-registry/src/test/java/com/linkedin/metadata/aspect/hooks/OwnerTypeMapTest.java +++ /dev/null @@ -1,220 +0,0 @@ -package com.linkedin.metadata.aspect.hooks; - -import static com.linkedin.metadata.Constants.DEFAULT_OWNERSHIP_TYPE_URN; -import static org.mockito.Mockito.mock; -import static org.testng.Assert.assertEquals; - -import com.linkedin.common.Owner; -import com.linkedin.common.OwnerArray; -import com.linkedin.common.Ownership; -import com.linkedin.common.UrnArray; -import com.linkedin.common.UrnArrayMap; -import com.linkedin.common.urn.Urn; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.aspect.AspectRetriever; -import com.linkedin.metadata.aspect.batch.ChangeMCP; -import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.test.metadata.aspect.TestEntityRegistry; -import com.linkedin.test.metadata.aspect.batch.TestMCP; -import java.util.List; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.annotation.Nullable; -import org.testng.annotations.Test; - -public class OwnerTypeMapTest { - private static final AspectRetriever ASPECT_RETRIEVER = mock(AspectRetriever.class); - private static final EntityRegistry ENTITY_REGISTRY = new TestEntityRegistry(); - private static final AspectPluginConfig ASPECT_PLUGIN_CONFIG = - AspectPluginConfig.builder() - .className("some class") - .enabled(true) - .supportedEntityAspectNames( - List.of( - AspectPluginConfig.EntityAspectName.builder() - .entityName("*") - .aspectName("ownership") - .build())) - .build(); - private static final Urn TEST_ENTITY_URN = - UrnUtils.getUrn( - "urn:li:dataset:(urn:li:dataPlatform:bigquery,calm-pagoda-323403.jaffle_shop.orders,PROD)"); - private static final Urn TEST_USER_A = UrnUtils.getUrn("urn:li:corpUser:a"); - private static final Urn TEST_USER_B = UrnUtils.getUrn("urn:li:corpUser:b"); - private static final Urn TEST_GROUP_A = UrnUtils.getUrn("urn:li:corpGroup:a"); - private static final Urn TEST_GROUP_B = UrnUtils.getUrn("urn:li:corpGroup:b"); - private static final Urn TECH_OWNER = - UrnUtils.getUrn("urn:li:ownershipType:__system__technical_owner"); - private static final Urn BUS_OWNER = - UrnUtils.getUrn("urn:li:ownershipType:__system__business_owner"); - - @Test - public void ownershipTypeMutationNoneType() { - OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); - Ownership ownership = buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_GROUP_A, List.of())); - testHook.writeMutation(buildMCP(null, ownership), 
ASPECT_RETRIEVER); - - assertEquals( - ownership.getOwnerTypes(), - new UrnArrayMap( - Map.of( - DEFAULT_OWNERSHIP_TYPE_URN.toString(), - new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)))), - "Expected generic owners to be grouped by `none` ownership type."); - } - - @Test - public void ownershipTypeMutationNoneTypeAdd() { - OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); - Ownership oldOwnership = buildOwnership(Map.of(TEST_USER_A, List.of())); - Ownership newOwnership = - buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_GROUP_A, List.of())); - testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); - - assertEquals( - newOwnership.getOwnerTypes(), - new UrnArrayMap( - Map.of( - DEFAULT_OWNERSHIP_TYPE_URN.toString(), - new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)))), - "Expected generic owners to be grouped by `none` ownership type."); - } - - @Test - public void ownershipTypeMutationNoneTypeRemove() { - OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); - Ownership oldOwnership = - buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_GROUP_A, List.of())); - Ownership newOwnership = buildOwnership(Map.of(TEST_USER_A, List.of())); - testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); - - assertEquals( - newOwnership.getOwnerTypes(), - new UrnArrayMap( - Map.of(DEFAULT_OWNERSHIP_TYPE_URN.toString(), new UrnArray(List.of(TEST_USER_A)))), - "Expected generic owners to be grouped by `none` ownership type."); - } - - @Test - public void ownershipTypeMutationMixedType() { - OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); - Ownership ownership = - buildOwnership( - Map.of( - TEST_USER_A, - List.of(), - TEST_GROUP_A, - List.of(), - TEST_USER_B, - List.of(BUS_OWNER), - TEST_GROUP_B, - List.of(TECH_OWNER))); - testHook.writeMutation(buildMCP(null, ownership), ASPECT_RETRIEVER); - - assertEquals( - ownership.getOwnerTypes(), - new UrnArrayMap( - Map.of( - DEFAULT_OWNERSHIP_TYPE_URN.toString(), - new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)), - BUS_OWNER.toString(), - new UrnArray(List.of(TEST_USER_B)), - TECH_OWNER.toString(), - new UrnArray(List.of(TEST_GROUP_B)))), - "Expected generic owners to be grouped by `none` ownership type as well as specified types."); - } - - @Test - public void ownershipTypeMutationMixedTypeAdd() { - OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); - Ownership oldOwnership = - buildOwnership(Map.of(TEST_USER_A, List.of(), TEST_USER_B, List.of(BUS_OWNER))); - Ownership newOwnership = - buildOwnership( - Map.of( - TEST_USER_A, - List.of(), - TEST_GROUP_A, - List.of(), - TEST_USER_B, - List.of(BUS_OWNER), - TEST_GROUP_B, - List.of(TECH_OWNER))); - testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); - - assertEquals( - newOwnership.getOwnerTypes(), - new UrnArrayMap( - Map.of( - DEFAULT_OWNERSHIP_TYPE_URN.toString(), - new UrnArray(List.of(TEST_USER_A, TEST_GROUP_A)), - BUS_OWNER.toString(), - new UrnArray(List.of(TEST_USER_B)), - TECH_OWNER.toString(), - new UrnArray(List.of(TEST_GROUP_B)))), - "Expected generic owners to be grouped by `none` ownership type as well as specified types."); - } - - @Test - public void ownershipTypeMutationMixedTypeRemove() { - OwnerTypeMap testHook = new OwnerTypeMap(ASPECT_PLUGIN_CONFIG); - Ownership oldOwnership = - buildOwnership( - Map.of( - TEST_USER_A, - List.of(), - TEST_GROUP_A, - List.of(), - TEST_USER_B, - List.of(BUS_OWNER), - TEST_GROUP_B, - List.of(TECH_OWNER))); - Ownership newOwnership 
= - buildOwnership(Map.of(TEST_GROUP_A, List.of(), TEST_GROUP_B, List.of(TECH_OWNER))); - testHook.writeMutation(buildMCP(oldOwnership, newOwnership), ASPECT_RETRIEVER); - - assertEquals( - newOwnership.getOwnerTypes(), - new UrnArrayMap( - Map.of( - DEFAULT_OWNERSHIP_TYPE_URN.toString(), - new UrnArray(List.of(TEST_GROUP_A)), - BUS_OWNER.toString(), - new UrnArray(), - TECH_OWNER.toString(), - new UrnArray(List.of(TEST_GROUP_B)))), - "Expected generic owners to be grouped by `none` ownership type as well as specified types."); - } - - private static Ownership buildOwnership(Map> ownershipTypes) { - Ownership ownership = new Ownership(); - ownership.setOwners( - ownershipTypes.entrySet().stream() - .flatMap( - entry -> { - if (entry.getValue().isEmpty()) { - Owner owner = new Owner(); - owner.setOwner(entry.getKey()); - return Stream.of(owner); - } else { - return entry.getValue().stream() - .map( - typeUrn -> { - Owner owner = new Owner(); - owner.setOwner(entry.getKey()); - owner.setTypeUrn(typeUrn); - return owner; - }); - } - }) - .collect(Collectors.toCollection(OwnerArray::new))); - return ownership; - } - - private static Set buildMCP(@Nullable Ownership oldOwnership, Ownership newOwnership) { - return TestMCP.ofOneMCP(TEST_ENTITY_URN, oldOwnership, newOwnership, ENTITY_REGISTRY); - } -} diff --git a/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/CreateIfNotExistsValidatorTest.java b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/CreateIfNotExistsValidatorTest.java new file mode 100644 index 0000000000000..305ce10414a3e --- /dev/null +++ b/entity-registry/src/test/java/com/linkedin/metadata/aspect/validators/CreateIfNotExistsValidatorTest.java @@ -0,0 +1,161 @@ +package com.linkedin.metadata.aspect.validators; + +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; + +import com.linkedin.common.Status; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.aspect.SystemAspect; +import com.linkedin.metadata.aspect.batch.ChangeMCP; +import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig; +import com.linkedin.metadata.aspect.plugins.validation.AspectValidationException; +import com.linkedin.metadata.aspect.validation.CreateIfNotExistsValidator; +import com.linkedin.metadata.key.ChartKey; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.test.metadata.aspect.TestEntityRegistry; +import com.linkedin.test.metadata.aspect.batch.TestMCP; +import java.util.List; +import java.util.Set; +import java.util.stream.Collectors; +import org.testng.annotations.BeforeTest; +import org.testng.annotations.Test; + +public class CreateIfNotExistsValidatorTest { + private EntityRegistry entityRegistry; + private AspectRetriever mockAspectRetriever; + + private static final AspectPluginConfig validatorConfig = + AspectPluginConfig.builder() + .supportedOperations(List.of("CREATE", "CREATE_ENTITY")) + .className(CreateIfNotExistsValidator.class.getName()) + .supportedEntityAspectNames(List.of(AspectPluginConfig.EntityAspectName.ALL)) + .enabled(true) + .build(); + + @BeforeTest + public void init() { + entityRegistry = new TestEntityRegistry(); + mockAspectRetriever = mock(AspectRetriever.class); + when(mockAspectRetriever.getEntityRegistry()).thenReturn(entityRegistry); + } + + @Test + 
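The test configuration that follows registers the validator for every entity/aspect pair with supportedOperations CREATE and CREATE_ENTITY. A hedged sketch of a narrower registration, reusing only AspectPluginConfig builder calls that appear elsewhere in this patch; the dataset/status scoping is illustrative:

import com.linkedin.metadata.aspect.plugins.config.AspectPluginConfig;
import com.linkedin.metadata.aspect.validation.CreateIfNotExistsValidator;
import java.util.List;

public class ValidatorConfigExample {
  // Scope the conditional-write validator to the dataset "status" aspect only.
  public static final AspectPluginConfig DATASET_STATUS_ONLY =
      AspectPluginConfig.builder()
          .className(CreateIfNotExistsValidator.class.getName())
          .enabled(true)
          .supportedOperations(List.of("CREATE", "CREATE_ENTITY"))
          .supportedEntityAspectNames(
              List.of(
                  AspectPluginConfig.EntityAspectName.builder()
                      .entityName("dataset")
                      .aspectName("status")
                      .build()))
          .build();
}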
public void testCreateIfEntityNotExistsSuccess() { + CreateIfNotExistsValidator test = new CreateIfNotExistsValidator(validatorConfig); + Urn testEntityUrn = UrnUtils.getUrn("urn:li:chart:(looker,baz1)"); + + Set exceptions = + test.validatePreCommit( + List.of( + // Request aspect + TestMCP.builder() + .changeType(ChangeType.CREATE_ENTITY) + .urn(testEntityUrn) + .entitySpec(entityRegistry.getEntitySpec(testEntityUrn.getEntityType())) + .aspectSpec( + entityRegistry + .getEntitySpec(testEntityUrn.getEntityType()) + .getAspectSpec("status")) + .recordTemplate(new Status().setRemoved(false)) + .build(), + // Required key aspect to indicate non-entity existence + TestMCP.builder() + .changeType(ChangeType.CREATE) + .urn(testEntityUrn) + .entitySpec(entityRegistry.getEntitySpec(testEntityUrn.getEntityType())) + .aspectSpec( + entityRegistry + .getEntitySpec(testEntityUrn.getEntityType()) + .getKeyAspectSpec()) + .recordTemplate(new ChartKey().setChartId("looker,baz1")) + .build()), + mockAspectRetriever) + .collect(Collectors.toSet()); + + assertEquals(Set.of(), exceptions); + } + + @Test + public void testCreateIfEntityNotExistsFail() { + CreateIfNotExistsValidator test = new CreateIfNotExistsValidator(validatorConfig); + Urn testEntityUrn = UrnUtils.getUrn("urn:li:chart:(looker,baz1)"); + + ChangeMCP testItem = + TestMCP.builder() + .changeType(ChangeType.CREATE_ENTITY) + .urn(testEntityUrn) + .entitySpec(entityRegistry.getEntitySpec(testEntityUrn.getEntityType())) + .aspectSpec( + entityRegistry.getEntitySpec(testEntityUrn.getEntityType()).getAspectSpec("status")) + .recordTemplate(new Status().setRemoved(false)) + .build(); + + // missing key aspect + Set exceptions = + test.validatePreCommit(List.of(testItem), mockAspectRetriever).collect(Collectors.toSet()); + + assertEquals( + exceptions, + Set.of( + AspectValidationException.forItem( + testItem, + "Cannot perform CREATE_ENTITY if not exists since the entity key already exists."))); + } + + @Test + public void testCreateIfNotExistsSuccess() { + CreateIfNotExistsValidator test = new CreateIfNotExistsValidator(validatorConfig); + Urn testEntityUrn = UrnUtils.getUrn("urn:li:chart:(looker,baz1)"); + + Set exceptions = + test.validatePreCommit( + List.of( + TestMCP.builder() + .changeType(ChangeType.CREATE) + .urn(testEntityUrn) + .entitySpec(entityRegistry.getEntitySpec(testEntityUrn.getEntityType())) + .aspectSpec( + entityRegistry + .getEntitySpec(testEntityUrn.getEntityType()) + .getAspectSpec("status")) + .recordTemplate(new Status().setRemoved(false)) + .build()), + mockAspectRetriever) + .collect(Collectors.toSet()); + + assertEquals(Set.of(), exceptions); + } + + @Test + public void testCreateIfNotExistsFail() { + CreateIfNotExistsValidator test = new CreateIfNotExistsValidator(validatorConfig); + Urn testEntityUrn = UrnUtils.getUrn("urn:li:chart:(looker,baz1)"); + + SystemAspect mockSystemAspect = mock(SystemAspect.class); + when(mockSystemAspect.getRecordTemplate()).thenReturn(new Status().setRemoved(true)); + + TestMCP testItem = + TestMCP.builder() + .changeType(ChangeType.CREATE) + .urn(testEntityUrn) + .entitySpec(entityRegistry.getEntitySpec(testEntityUrn.getEntityType())) + .aspectSpec( + entityRegistry.getEntitySpec(testEntityUrn.getEntityType()).getAspectSpec("status")) + .recordTemplate(new Status().setRemoved(false)) + .previousSystemAspect(mockSystemAspect) + .build(); + + Set exceptions = + test.validatePreCommit(List.of(testItem), mockAspectRetriever).collect(Collectors.toSet()); + + assertEquals( + exceptions, + 
Set.of( + AspectValidationException.forItem( + testItem, "Cannot perform CREATE since the aspect already exists."))); + } +} diff --git a/metadata-auth/auth-api/build.gradle b/metadata-auth/auth-api/build.gradle index c68c3019bd2b4..7303b79b0c5f0 100644 --- a/metadata-auth/auth-api/build.gradle +++ b/metadata-auth/auth-api/build.gradle @@ -9,11 +9,6 @@ apply plugin: 'maven-publish' apply plugin: 'io.codearte.nexus-staging' apply from: '../../metadata-integration/java/versioning.gradle' - -test { - useJUnit() -} - jar { archiveClassifier = "lib" } @@ -35,6 +30,9 @@ dependencies() { annotationProcessor externalDependency.lombok testImplementation externalDependency.testng + testImplementation externalDependency.mockito + testImplementation 'uk.org.webcompere:system-stubs-testng:2.1.6' + } task sourcesJar(type: Jar) { diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java index 2ae9ee8ab14ea..09ca2936bad92 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authentication/Actor.java @@ -36,4 +36,22 @@ public String toUrnStr() { throw new IllegalArgumentException( String.format("Unrecognized ActorType %s provided", getType())); } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Actor actor = (Actor) o; + + if (type != actor.type) return false; + return id.equals(actor.id); + } + + @Override + public int hashCode() { + int result = type.hashCode(); + result = 31 * result + id.hashCode(); + return result; + } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java index f8d08c6adbd3a..8522a4ea72e83 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/AuthUtil.java @@ -1,57 +1,506 @@ package com.datahub.authorization; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; +import static com.linkedin.metadata.Constants.NOTEBOOK_ENTITY_NAME; +import static com.linkedin.metadata.Constants.REST_API_AUTHORIZATION_ENABLED_ENV; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiOperation.CREATE; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static 
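Actor now carries value-based equals and hashCode, so two instances with the same type and id compare equal; the new AuthUtil tests below key their mock responses by actor on that assumption. A small illustrative check (the user id is a placeholder):

import com.datahub.authentication.Actor;
import com.datahub.authentication.ActorType;

public class ActorEqualityExample {
  public static void main(String[] args) {
    Actor a = new Actor(ActorType.USER, "datahub");
    Actor b = new Actor(ActorType.USER, "datahub");
    // Value equality: same type + id means equal objects and equal hash codes.
    System.out.println(a.equals(b));                  // true
    System.out.println(a.hashCode() == b.hashCode()); // true
  }
}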
com.linkedin.metadata.authorization.ApiOperation.READ; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; +import static com.linkedin.metadata.authorization.Disjunctive.DENY_ACCESS; +import static com.linkedin.metadata.authorization.PoliciesConfig.API_ENTITY_PRIVILEGE_MAP; +import static com.linkedin.metadata.authorization.PoliciesConfig.API_PRIVILEGE_MAP; + +import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.ImmutableSet; +import com.linkedin.common.urn.Urn; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.authorization.ApiGroup; +import com.linkedin.metadata.authorization.ApiOperation; +import com.linkedin.metadata.authorization.Conjunctive; +import com.linkedin.metadata.authorization.Disjunctive; +import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.browse.BrowseResult; +import com.linkedin.metadata.browse.BrowseResultEntity; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.AutoCompleteEntity; +import com.linkedin.metadata.query.AutoCompleteResult; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.utils.EntityKeyUtils; +import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.util.Pair; +import java.util.Collection; import java.util.List; +import java.util.Map; import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.apache.http.HttpStatus; +/** + * Notes: This class is an attempt to unify privilege checks across APIs. + * + *
<p>
Public: The intent is that the public interface uses the typical abstractions for Urns, + * ApiOperation, ApiGroup, and entity type strings + * + *
<p>
Private functions can use the more specific Privileges, Disjunctive/Conjunctive interfaces + * required for the policy engine and authorizer + * + *
<p>
isAPI...() functions are intended for OpenAPI and Rest.li since they are governed by an enable + * flag. GraphQL is always enabled and should use is...() functions. + */ public class AuthUtil { - public static boolean isAuthorized( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull Optional maybeResourceSpec, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : - privilegeGroup.getAuthorizedPrivilegeGroups()) { - // If any conjunctive privilege group is authorized, then the entire request is authorized. - if (isAuthorized(authorizer, actor, andPrivilegeGroup, maybeResourceSpec)) { - return true; - } + /** + * This should generally follow the policy creation UI with a few exceptions for users, groups, + * containers, etc so that the platform still functions as expected. + */ + public static final Set VIEW_RESTRICTED_ENTITY_TYPES = + ImmutableSet.of( + DATASET_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + CHART_ENTITY_NAME, + ML_MODEL_ENTITY_NAME, + ML_FEATURE_ENTITY_NAME, + ML_MODEL_GROUP_ENTITY_NAME, + ML_FEATURE_TABLE_ENTITY_NAME, + ML_PRIMARY_KEY_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME, + GLOSSARY_TERM_ENTITY_NAME, + GLOSSARY_NODE_ENTITY_NAME, + DOMAIN_ENTITY_NAME, + DATA_PRODUCT_ENTITY_NAME, + NOTEBOOK_ENTITY_NAME); + + /** OpenAPI/Rest.li Methods */ + public static List> isAPIAuthorized( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final EntityRegistry entityRegistry, + @Nonnull final Collection mcps) { + + List, MetadataChangeProposal>> changeUrnMCPs = + mcps.stream() + .map( + mcp -> { + Urn urn = mcp.getEntityUrn(); + if (urn == null) { + com.linkedin.metadata.models.EntitySpec entitySpec = + entityRegistry.getEntitySpec(mcp.getEntityType()); + urn = EntityKeyUtils.getUrnFromProposal(mcp, entitySpec.getKeyAspectSpec()); + } + return Pair.of(Pair.of(mcp.getChangeType(), urn), mcp); + }) + .collect(Collectors.toList()); + + Map, Integer> authorizationResult = + isAPIAuthorizedUrns( + authentication, + authorizer, + apiGroup, + changeUrnMCPs.stream().map(Pair::getFirst).collect(Collectors.toSet())); + + return changeUrnMCPs.stream() + .map( + changeUrnMCP -> + Pair.of( + changeUrnMCP.getValue(), + authorizationResult.getOrDefault( + changeUrnMCP.getKey(), HttpStatus.SC_INTERNAL_SERVER_ERROR))) + .collect(Collectors.toList()); + } + + public static Map, Integer> isAPIAuthorizedUrns( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final Collection> changeTypeUrns) { + + return changeTypeUrns.stream() + .distinct() + .map( + changeTypePair -> { + final Urn urn = changeTypePair.getSecond(); + switch (changeTypePair.getFirst()) { + case CREATE: + case UPSERT: + case UPDATE: + case RESTATE: + case PATCH: + if (!isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(apiGroup, UPDATE, urn.getEntityType()), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + return Pair.of(changeTypePair, HttpStatus.SC_FORBIDDEN); + } + break; + case CREATE_ENTITY: + if (!isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(apiGroup, CREATE, urn.getEntityType()), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + return Pair.of(changeTypePair, HttpStatus.SC_FORBIDDEN); + } + break; + case DELETE: + if (!isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(apiGroup, DELETE, 
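To make the contract of isAPIAuthorizedUrns concrete: it returns one HTTP-style status per (ChangeType, Urn) pair instead of a single boolean, so callers can surface partial failures. A hedged usage sketch mirroring the new AuthUtilTest further down; the authentication, authorizer, and dataset urn are placeholders:

import com.datahub.authentication.Authentication;
import com.datahub.authorization.AuthUtil;
import com.datahub.plugins.auth.authorization.Authorizer;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import com.linkedin.events.metadata.ChangeType;
import com.linkedin.metadata.authorization.ApiGroup;
import com.linkedin.util.Pair;
import java.util.List;
import java.util.Map;

public class BatchAuthorizationExample {
  public static void checkBatch(Authentication auth, Authorizer authorizer) {
    Urn ds = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,example,PROD)");
    Map<Pair<ChangeType, Urn>, Integer> statuses =
        AuthUtil.isAPIAuthorizedUrns(
            auth,
            authorizer,
            ApiGroup.ENTITY,
            List.of(Pair.of(ChangeType.UPSERT, ds), Pair.of(ChangeType.DELETE, ds)));
    // 200 = allowed, 403 = denied, 400 = unsupported change type for this path.
    statuses.forEach((change, status) -> System.out.println(change + " -> " + status));
  }
}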
urn.getEntityType()), + new EntitySpec(urn.getEntityType(), urn.toString()))) { + return Pair.of(changeTypePair, HttpStatus.SC_FORBIDDEN); + } + break; + default: + return Pair.of(changeTypePair, HttpStatus.SC_BAD_REQUEST); + } + return Pair.of(changeTypePair, HttpStatus.SC_OK); + }) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + } + + public static boolean isAPIAuthorizedResult( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final SearchResult result) { + return isAPIAuthorizedEntityUrns( + authentication, + authorizer, + READ, + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())); + } + + public static boolean isAPIAuthorizedResult( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ScrollResult result) { + return isAPIAuthorizedEntityUrns( + authentication, + authorizer, + READ, + result.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())); + } + + public static boolean isAPIAuthorizedResult( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final AutoCompleteResult result) { + return isAPIAuthorizedEntityUrns( + authentication, + authorizer, + READ, + result.getEntities().stream().map(AutoCompleteEntity::getUrn).collect(Collectors.toList())); + } + + public static boolean isAPIAuthorizedResult( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final BrowseResult result) { + return isAPIAuthorizedEntityUrns( + authentication, + authorizer, + READ, + result.getEntities().stream().map(BrowseResultEntity::getUrn).collect(Collectors.toList())); + } + + public static boolean isAPIAuthorizedUrns( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection urns) { + + if (ApiGroup.ENTITY.equals(apiGroup)) { + return isAPIAuthorizedEntityUrns(authentication, authorizer, apiOperation, urns); } - // If none of the disjunctive privilege groups were authorized, then the entire request is not - // authorized. - return false; + + List resourceSpecs = + urns.stream() + .map(urn -> new EntitySpec(urn.getEntityType(), urn.toString())) + .collect(Collectors.toList()); + + return isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(apiGroup, apiOperation, null), + resourceSpecs); + } + + public static boolean isAPIAuthorizedEntityUrns( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection urns) { + + Map> resourceSpecs = + urns.stream() + .map(urn -> new EntitySpec(urn.getEntityType(), urn.toString())) + .collect(Collectors.groupingBy(EntitySpec::getType)); + + return resourceSpecs.entrySet().stream() + .allMatch( + entry -> + isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(ENTITY, apiOperation, entry.getKey()), + entry.getValue())); } - public static boolean isAuthorizedForResources( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull List> resourceSpecs, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { - for (ConjunctivePrivilegeGroup andPrivilegeGroup : - privilegeGroup.getAuthorizedPrivilegeGroups()) { - // If any conjunctive privilege group is authorized, then the entire request is authorized. 
- if (isAuthorizedForResources(authorizer, actor, andPrivilegeGroup, resourceSpecs)) { + public static boolean isAPIAuthorizedEntityType( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiOperation apiOperation, + @Nonnull final String entityType) { + return isAPIAuthorizedEntityType( + authentication, authorizer, ENTITY, apiOperation, List.of(entityType)); + } + + public static boolean isAPIAuthorizedEntityType( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation, + @Nonnull final String entityType) { + return isAPIAuthorizedEntityType( + authentication, authorizer, apiGroup, apiOperation, List.of(entityType)); + } + + public static boolean isAPIAuthorizedEntityType( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection entityTypes) { + + return isAPIAuthorizedEntityType(authentication, authorizer, ENTITY, apiOperation, entityTypes); + } + + public static boolean isAPIAuthorizedEntityType( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection entityTypes) { + + return entityTypes.stream() + .distinct() + .allMatch( + entityType -> + isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(apiGroup, apiOperation, entityType), + new EntitySpec(entityType, ""))); + } + + public static boolean isAPIAuthorized( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation) { + return isAPIAuthorized( + authentication, + authorizer, + lookupAPIPrivilege(apiGroup, apiOperation, null), + (EntitySpec) null); + } + + public static boolean isAPIAuthorized( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final PoliciesConfig.Privilege privilege, + @Nullable final EntitySpec resource) { + return isAPIAuthorized(authentication, authorizer, Disjunctive.disjoint(privilege), resource); + } + + public static boolean isAPIAuthorized( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final PoliciesConfig.Privilege privilege) { + return isAPIAuthorized( + authentication, authorizer, Disjunctive.disjoint(privilege), (EntitySpec) null); + } + + private static boolean isAPIAuthorized( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final Disjunctive> privileges, + @Nullable final EntitySpec resource) { + return isAPIAuthorized( + authentication, authorizer, privileges, resource != null ? 
List.of(resource) : List.of()); + } + + private static boolean isAPIAuthorized( + @Nonnull final Authentication authentication, + @Nonnull final Authorizer authorizer, + @Nonnull final Disjunctive> privileges, + @Nonnull final Collection resources) { + if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV))) { + return isAuthorized( + authorizer, + authentication.getActor().toUrnStr(), + buildDisjunctivePrivilegeGroup(privileges), + resources); + } else { + return true; + } + } + + /** GraphQL Methods */ + public static boolean canViewEntity( + @Nonnull final String actor, @Nonnull Authorizer authorizer, @Nonnull Urn urn) { + return canViewEntity(actor, authorizer, List.of(urn)); + } + + public static boolean canViewEntity( + @Nonnull final String actor, + @Nonnull final Authorizer authorizer, + @Nonnull final Collection urns) { + + return isAuthorizedEntityUrns(authorizer, actor, READ, urns); + } + + public static boolean isAuthorized( + @Nonnull final String actor, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation) { + return isAuthorized(authorizer, actor, lookupAPIPrivilege(apiGroup, apiOperation, null), null); + } + + public static boolean isAuthorizedEntityType( + @Nonnull final String actor, + @Nonnull final Authorizer authorizer, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection entityTypes) { + + return entityTypes.stream() + .distinct() + .allMatch( + entityType -> + isAuthorized( + authorizer, + actor, + lookupEntityAPIPrivilege(apiOperation, entityType), + new EntitySpec(entityType, ""))); + } + + public static boolean isAuthorizedEntityUrns( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection urns) { + return isAuthorizedUrns(authorizer, actor, ENTITY, apiOperation, urns); + } + + public static boolean isAuthorizedUrns( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation, + @Nonnull final Collection urns) { + + Map> resourceSpecs = + urns.stream() + .map(urn -> new EntitySpec(urn.getEntityType(), urn.toString())) + .collect(Collectors.groupingBy(EntitySpec::getType)); + + return resourceSpecs.entrySet().stream() + .allMatch( + entry -> { + Disjunctive> privileges = + lookupAPIPrivilege(apiGroup, apiOperation, entry.getKey()); + return entry.getValue().stream() + .allMatch(entitySpec -> isAuthorized(authorizer, actor, privileges, entitySpec)); + }); + } + + public static boolean isAuthorized( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final PoliciesConfig.Privilege privilege) { + return isAuthorized( + authorizer, + actor, + buildDisjunctivePrivilegeGroup(Disjunctive.disjoint(privilege)), + (EntitySpec) null); + } + + public static boolean isAuthorized( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final PoliciesConfig.Privilege privilege, + @Nullable final EntitySpec entitySpec) { + return isAuthorized( + authorizer, + actor, + buildDisjunctivePrivilegeGroup(Disjunctive.disjoint(privilege)), + entitySpec); + } + + private static boolean isAuthorized( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final Disjunctive> privileges, + @Nullable EntitySpec maybeResourceSpec) { + return isAuthorized( + authorizer, actor, buildDisjunctivePrivilegeGroup(privileges), 
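The GraphQL-side helpers take the actor urn string directly and, unlike the isAPI...() variants, are not gated by REST_API_AUTHORIZATION_ENABLED_ENV. A short sketch of a view check over several urns using the canViewEntity signature above; the urns are placeholders:

import com.datahub.authorization.AuthUtil;
import com.datahub.plugins.auth.authorization.Authorizer;
import com.linkedin.common.urn.Urn;
import com.linkedin.common.urn.UrnUtils;
import java.util.List;

public class ViewCheckExample {
  public static boolean canViewBoth(String actorUrn, Authorizer authorizer) {
    Urn chart = UrnUtils.getUrn("urn:li:chart:(looker,baz1)");
    Urn dashboard = UrnUtils.getUrn("urn:li:dashboard:(looker,dashboards.1)");
    // True only if READ is allowed on every urn in the collection.
    return AuthUtil.canViewEntity(actorUrn, authorizer, List.of(chart, dashboard));
  }
}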
maybeResourceSpec); + } + + public static boolean isAuthorized( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final DisjunctivePrivilegeGroup privilegeGroup, + @Nullable final EntitySpec resourceSpec) { + + for (ConjunctivePrivilegeGroup conjunctive : privilegeGroup.getAuthorizedPrivilegeGroups()) { + if (isAuthorized(authorizer, actor, conjunctive, resourceSpec)) { return true; } } - // If none of the disjunctive privilege groups were authorized, then the entire request is not - // authorized. + return false; } private static boolean isAuthorized( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull ConjunctivePrivilegeGroup requiredPrivileges, - @Nonnull Optional resourceSpec) { + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final ConjunctivePrivilegeGroup requiredPrivileges, + @Nullable final EntitySpec resourceSpec) { + + // if no privileges are required, deny + if (requiredPrivileges.getRequiredPrivileges().isEmpty()) { + return false; + } + // Each privilege in a group _must_ all be true to permit the operation. for (final String privilege : requiredPrivileges.getRequiredPrivileges()) { // Create and evaluate an Authorization request. - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege, resourceSpec); - final AuthorizationResult result = authorizer.authorize(request); - if (AuthorizationResult.Type.DENY.equals(result.getType())) { + if (isDenied(authorizer, actor, privilege, resourceSpec)) { // Short circuit. return false; } @@ -59,25 +508,129 @@ private static boolean isAuthorized( return true; } - private static boolean isAuthorizedForResources( - @Nonnull Authorizer authorizer, - @Nonnull String actor, - @Nonnull ConjunctivePrivilegeGroup requiredPrivileges, - @Nonnull List> resourceSpecs) { - // Each privilege in a group _must_ all be true to permit the operation. - for (final String privilege : requiredPrivileges.getRequiredPrivileges()) { - // Create and evaluate an Authorization request. - for (Optional resourceSpec : resourceSpecs) { - final AuthorizationRequest request = - new AuthorizationRequest(actor, privilege, resourceSpec); - final AuthorizationResult result = authorizer.authorize(request); - if (AuthorizationResult.Type.DENY.equals(result.getType())) { - // Short circuit. - return false; - } - } + private static boolean isAuthorized( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final DisjunctivePrivilegeGroup privilegeGroup, + @Nonnull final Collection resourceSpecs) { + + if (resourceSpecs.isEmpty()) { + return isAuthorized(authorizer, actor, privilegeGroup, (EntitySpec) null); } - return true; + + return resourceSpecs.stream() + .allMatch(spec -> isAuthorized(authorizer, actor, privilegeGroup, spec)); + } + + /** Common Methods */ + + /** + * Based on an API group and operation return privileges. Broad level privileges that are not + * specific to an Entity/Aspect. 
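A brief restatement of the group semantics this refactor leans on: a DisjunctivePrivilegeGroup is an OR over ConjunctivePrivilegeGroups, each of which is an AND over privilege names, and an empty conjunctive group now denies rather than trivially allowing. A small sketch using the same privilege strings that appear in the new tests:

import com.datahub.authorization.ConjunctivePrivilegeGroup;
import com.datahub.authorization.DisjunctivePrivilegeGroup;
import java.util.List;

public class PrivilegeGroupExample {
  // Allowed if the actor has (EDIT_ENTITY) OR (VIEW_ENTITY_PAGE AND GET_ENTITY_PRIVILEGE).
  public static final DisjunctivePrivilegeGroup EDIT_OR_FULL_READ =
      new DisjunctivePrivilegeGroup(
          List.of(
              new ConjunctivePrivilegeGroup(List.of("EDIT_ENTITY")),
              new ConjunctivePrivilegeGroup(
                  List.of("VIEW_ENTITY_PAGE", "GET_ENTITY_PRIVILEGE"))));
}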
+ * + * @param apiGroup + * @param apiOperation + * @return + */ + public static Disjunctive> lookupAPIPrivilege( + @Nonnull ApiGroup apiGroup, @Nonnull ApiOperation apiOperation, @Nullable String entityType) { + + if (ApiGroup.ENTITY.equals(apiGroup) && entityType == null) { + throw new IllegalArgumentException("ENTITY API Group must include an entityType"); + } + + if (ApiGroup.ENTITY.equals(apiGroup)) { + return lookupEntityAPIPrivilege(apiOperation, Set.of(entityType)).get(entityType); + } + + Map>> privMap = + API_PRIVILEGE_MAP.getOrDefault(apiGroup, Map.of()); + + switch (apiOperation) { + // Manage is a conjunction of UPDATE and DELETE + case MANAGE: + return Disjunctive.conjoin( + privMap.getOrDefault(ApiOperation.UPDATE, DENY_ACCESS), + privMap.getOrDefault(ApiOperation.DELETE, DENY_ACCESS)); + default: + return privMap.getOrDefault(apiOperation, DENY_ACCESS); + } + } + + /** + * Returns map of entityType to privileges required for that entity + * + * @param apiOperation + * @param entityTypes + * @return + */ + @VisibleForTesting + static Map>> lookupEntityAPIPrivilege( + @Nonnull ApiOperation apiOperation, @Nonnull Collection entityTypes) { + + return entityTypes.stream() + .distinct() + .map( + entityType -> { + + // Check entity specific privilege map, otherwise default to generic entity + Map>> privMap = + API_ENTITY_PRIVILEGE_MAP.getOrDefault( + entityType, API_PRIVILEGE_MAP.getOrDefault(ApiGroup.ENTITY, Map.of())); + + switch (apiOperation) { + // Manage is a conjunction of UPDATE and DELETE + case MANAGE: + return Pair.of( + entityType, + Disjunctive.conjoin( + privMap.getOrDefault(ApiOperation.UPDATE, DENY_ACCESS), + privMap.getOrDefault(ApiOperation.DELETE, DENY_ACCESS))); + default: + // otherwise default to generic entity + return Pair.of(entityType, privMap.getOrDefault(apiOperation, DENY_ACCESS)); + } + }) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + } + + @VisibleForTesting + static Disjunctive> lookupEntityAPIPrivilege( + @Nonnull ApiOperation apiOperation, @Nonnull String entityType) { + return lookupEntityAPIPrivilege(apiOperation, Set.of(entityType)).get(entityType); + } + + public static DisjunctivePrivilegeGroup buildDisjunctivePrivilegeGroup( + @Nonnull final ApiGroup apiGroup, + @Nonnull final ApiOperation apiOperation, + @Nullable final String entityType) { + return buildDisjunctivePrivilegeGroup(lookupAPIPrivilege(apiGroup, apiOperation, entityType)); + } + + @VisibleForTesting + static DisjunctivePrivilegeGroup buildDisjunctivePrivilegeGroup( + final Disjunctive> privileges) { + return new DisjunctivePrivilegeGroup( + privileges.stream() + .map( + priv -> + new ConjunctivePrivilegeGroup( + priv.stream() + .map(PoliciesConfig.Privilege::getType) + .collect(Collectors.toList()))) + .collect(Collectors.toList())); + } + + private static boolean isDenied( + @Nonnull final Authorizer authorizer, + @Nonnull final String actor, + @Nonnull final String privilege, + @Nullable final EntitySpec resourceSpec) { + // Create and evaluate an Authorization request. 
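As the lookup helpers above encode, MANAGE is not a standalone privilege set: it is the conjunction of the UPDATE and DELETE privileges, and entity types with their own entry in API_ENTITY_PRIVILEGE_MAP (for example dataHubPolicy) override the generic ENTITY mapping. A hedged sketch of resolving the group a caller is checked against; the dataset entity type is illustrative:

import com.datahub.authorization.AuthUtil;
import com.datahub.authorization.DisjunctivePrivilegeGroup;
import com.linkedin.metadata.authorization.ApiGroup;
import com.linkedin.metadata.authorization.ApiOperation;

public class ManagePrivilegeExample {
  public static DisjunctivePrivilegeGroup manageDatasetGroup() {
    // For a generic entity type, MANAGE requires the UPDATE and DELETE privileges together.
    return AuthUtil.buildDisjunctivePrivilegeGroup(
        ApiGroup.ENTITY, ApiOperation.MANAGE, "dataset");
  }
}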
+ final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege, Optional.ofNullable(resourceSpec)); + final AuthorizationResult result = authorizer.authorize(request); + return AuthorizationResult.Type.DENY.equals(result.getType()); } private AuthUtil() {} diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java index adbfdbe3236fc..f085cb119016c 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/ConjunctivePrivilegeGroup.java @@ -17,4 +17,24 @@ public ConjunctivePrivilegeGroup(Collection requiredPrivileges) { public Collection getRequiredPrivileges() { return _requiredPrivileges; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + ConjunctivePrivilegeGroup that = (ConjunctivePrivilegeGroup) o; + + return _requiredPrivileges.equals(that._requiredPrivileges); + } + + @Override + public int hashCode() { + return _requiredPrivileges.hashCode(); + } + + @Override + public String toString() { + return "ConjunctivePrivilegeGroup{" + "_requiredPrivileges=" + _requiredPrivileges + '}'; + } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java index 350476326da9f..d4f8b7c1cdbad 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/DisjunctivePrivilegeGroup.java @@ -1,6 +1,6 @@ package com.datahub.authorization; -import java.util.List; +import java.util.Collection; /** * Represents a group of privilege groups, any of which must be authorized to authorize a request. @@ -8,13 +8,37 @@ *
<p>
That is, an OR of privilege groups. */ public class DisjunctivePrivilegeGroup { - private final List _authorizedPrivilegeGroups; + private final Collection _authorizedPrivilegeGroups; - public DisjunctivePrivilegeGroup(List authorizedPrivilegeGroups) { + public DisjunctivePrivilegeGroup( + Collection authorizedPrivilegeGroups) { _authorizedPrivilegeGroups = authorizedPrivilegeGroups; } - public List getAuthorizedPrivilegeGroups() { + public Collection getAuthorizedPrivilegeGroups() { return _authorizedPrivilegeGroups; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + DisjunctivePrivilegeGroup that = (DisjunctivePrivilegeGroup) o; + + return _authorizedPrivilegeGroups.equals(that._authorizedPrivilegeGroups); + } + + @Override + public int hashCode() { + return _authorizedPrivilegeGroups.hashCode(); + } + + @Override + public String toString() { + return "DisjunctivePrivilegeGroup{" + + "_authorizedPrivilegeGroups=" + + _authorizedPrivilegeGroups + + '}'; + } } diff --git a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/SearchAuthorizationConfiguration.java b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/ViewAuthorizationConfiguration.java similarity index 72% rename from metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/SearchAuthorizationConfiguration.java rename to metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/ViewAuthorizationConfiguration.java index cb176130d2e78..e90369a6e39d1 100644 --- a/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/SearchAuthorizationConfiguration.java +++ b/metadata-auth/auth-api/src/main/java/com/datahub/authorization/config/ViewAuthorizationConfiguration.java @@ -10,15 +10,15 @@ @Data @AllArgsConstructor(access = AccessLevel.PACKAGE) @NoArgsConstructor(access = AccessLevel.PACKAGE) -public class SearchAuthorizationConfiguration { +public class ViewAuthorizationConfiguration { private boolean enabled; - private SearchAuthorizationRecommendationsConfiguration recommendations; + private ViewAuthorizationRecommendationsConfig recommendations; @Builder(toBuilder = true) @Data @AllArgsConstructor(access = AccessLevel.PACKAGE) @NoArgsConstructor(access = AccessLevel.PACKAGE) - public static class SearchAuthorizationRecommendationsConfiguration { + public static class ViewAuthorizationRecommendationsConfig { private boolean peerGroupEnabled; } } diff --git a/metadata-auth/auth-api/src/test/java/com/datahub/authorization/AuthUtilTest.java b/metadata-auth/auth-api/src/test/java/com/datahub/authorization/AuthUtilTest.java new file mode 100644 index 0000000000000..199b0faa933ae --- /dev/null +++ b/metadata-auth/auth-api/src/test/java/com/datahub/authorization/AuthUtilTest.java @@ -0,0 +1,256 @@ +package com.datahub.authorization; + +import static com.linkedin.metadata.Constants.REST_API_AUTHORIZATION_ENABLED_ENV; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiOperation.MANAGE; +import static com.linkedin.metadata.authorization.ApiOperation.READ; +import static com.linkedin.metadata.authorization.PoliciesConfig.API_ENTITY_PRIVILEGE_MAP; +import static com.linkedin.metadata.authorization.PoliciesConfig.API_PRIVILEGE_MAP; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; +import static org.testng.Assert.assertEquals; 
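Both privilege group classes now accept any Collection and implement value-based equals/hashCode, which is what lets the new tests compare expected and actual groups with assertEquals. A small illustrative check:

import com.datahub.authorization.ConjunctivePrivilegeGroup;
import com.datahub.authorization.DisjunctivePrivilegeGroup;
import java.util.List;
import java.util.Set;

public class PrivilegeGroupEqualityExample {
  public static void main(String[] args) {
    // The constructor now accepts any Collection, so a Set works as well as a List.
    DisjunctivePrivilegeGroup a =
        new DisjunctivePrivilegeGroup(
            Set.of(new ConjunctivePrivilegeGroup(List.of("VIEW_ENTITY_PAGE"))));
    DisjunctivePrivilegeGroup b =
        new DisjunctivePrivilegeGroup(
            Set.of(new ConjunctivePrivilegeGroup(List.of("VIEW_ENTITY_PAGE"))));
    // Value-based equals/hashCode allow direct comparison of groups.
    System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
  }
}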
+import static org.testng.Assert.assertFalse; +import static org.testng.Assert.assertTrue; + +import com.datahub.authentication.Actor; +import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; +import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.Constants; +import com.linkedin.metadata.authorization.ApiGroup; +import com.linkedin.metadata.authorization.ApiOperation; +import com.linkedin.metadata.authorization.Conjunctive; +import com.linkedin.util.Pair; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.testng.annotations.BeforeClass; +import org.testng.annotations.Listeners; +import org.testng.annotations.Test; +import uk.org.webcompere.systemstubs.environment.EnvironmentVariables; +import uk.org.webcompere.systemstubs.testng.SystemStub; +import uk.org.webcompere.systemstubs.testng.SystemStubsListener; + +@Listeners(SystemStubsListener.class) +public class AuthUtilTest { + @SystemStub private EnvironmentVariables setEnvironment; + + @BeforeClass + public void beforeAll() { + setEnvironment.set(REST_API_AUTHORIZATION_ENABLED_ENV, "true"); + } + + private static final Authentication TEST_AUTH_A = + new Authentication(new Actor(ActorType.USER, "testA"), ""); + private static final Authentication TEST_AUTH_B = + new Authentication(new Actor(ActorType.USER, "testB"), ""); + private static final Urn TEST_ENTITY_1 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:s3,1,PROD)"); + private static final Urn TEST_ENTITY_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,2,PROD)"); + private static final Urn TEST_ENTITY_3 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:snowflake,3,PROD)"); + + @Test + public void testSystemEnvInit() { + assertEquals(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV), "true"); + } + + @Test + public void testSimplePrivilegeGroupBuilder() { + assertEquals( + AuthUtil.buildDisjunctivePrivilegeGroup( + AuthUtil.lookupAPIPrivilege(ENTITY, READ, "dataset")), + new DisjunctivePrivilegeGroup( + List.of( + new ConjunctivePrivilegeGroup(List.of("VIEW_ENTITY_PAGE")), + new ConjunctivePrivilegeGroup(List.of("GET_ENTITY_PRIVILEGE")), + new ConjunctivePrivilegeGroup(List.of("EDIT_ENTITY")), + new ConjunctivePrivilegeGroup(List.of("DELETE_ENTITY"))))); + } + + @Test + public void testManageEntityPrivilegeGroupBuilder() { + assertEquals( + AuthUtil.buildDisjunctivePrivilegeGroup( + AuthUtil.lookupEntityAPIPrivilege(MANAGE, Constants.POLICY_ENTITY_NAME)), + new DisjunctivePrivilegeGroup( + List.of(new ConjunctivePrivilegeGroup(List.of("MANAGE_POLICIES"))))); + } + + @Test + public void testIsAPIAuthorizedUrns() { + Authorizer mockAuthorizer = + mockAuthorizer( + Map.of( + TEST_AUTH_A.getActor().toUrnStr(), + Map.of( + "EDIT_ENTITY", Set.of(TEST_ENTITY_1, TEST_ENTITY_2), + "VIEW_ENTITY_PAGE", Set.of(TEST_ENTITY_3)), + TEST_AUTH_B.getActor().toUrnStr(), + Map.of("VIEW_ENTITY_PAGE", Set.of(TEST_ENTITY_1, TEST_ENTITY_3)))); + + // User A (Entity 1 & 2 Edit, View only Entity 3) + assertTrue( + AuthUtil.isAPIAuthorizedEntityUrns( + TEST_AUTH_A, + mockAuthorizer, + READ, + List.of(TEST_ENTITY_1, TEST_ENTITY_2, TEST_ENTITY_3)), + "Expected read allowed for all entities"); + + assertEquals( + AuthUtil.isAPIAuthorizedUrns( + TEST_AUTH_A, + mockAuthorizer, + ENTITY, + List.of( + Pair.of(ChangeType.UPSERT, TEST_ENTITY_1), + 
Pair.of(ChangeType.UPSERT, TEST_ENTITY_2), + Pair.of(ChangeType.UPSERT, TEST_ENTITY_3))), + Map.of( + Pair.of(ChangeType.UPSERT, TEST_ENTITY_1), 200, + Pair.of(ChangeType.UPSERT, TEST_ENTITY_2), 200, + Pair.of(ChangeType.UPSERT, TEST_ENTITY_3), 403), + "Expected edit on entities 1 and 2 and denied on 3"); + + assertEquals( + AuthUtil.isAPIAuthorizedUrns( + TEST_AUTH_A, + mockAuthorizer, + ENTITY, + List.of( + Pair.of(ChangeType.DELETE, TEST_ENTITY_1), + Pair.of(ChangeType.DELETE, TEST_ENTITY_2), + Pair.of(ChangeType.DELETE, TEST_ENTITY_3))), + Map.of( + Pair.of(ChangeType.DELETE, TEST_ENTITY_1), 403, + Pair.of(ChangeType.DELETE, TEST_ENTITY_2), 403, + Pair.of(ChangeType.DELETE, TEST_ENTITY_3), 403), + "Expected deny on delete for all entities"); + + // User B Entity 2 Denied, Read access 1 & 3 + assertFalse( + AuthUtil.isAPIAuthorizedEntityUrns( + TEST_AUTH_B, + mockAuthorizer, + READ, + List.of(TEST_ENTITY_1, TEST_ENTITY_2, TEST_ENTITY_3)), + "Expected read denied for based on entity 2"); + assertTrue( + AuthUtil.isAPIAuthorizedEntityUrns( + TEST_AUTH_B, mockAuthorizer, READ, List.of(TEST_ENTITY_1, TEST_ENTITY_3)), + "Expected read allowed due to exclusion of entity 2"); + + assertEquals( + AuthUtil.isAPIAuthorizedUrns( + TEST_AUTH_B, + mockAuthorizer, + ENTITY, + List.of( + Pair.of(ChangeType.UPSERT, TEST_ENTITY_1), + Pair.of(ChangeType.UPSERT, TEST_ENTITY_2), + Pair.of(ChangeType.UPSERT, TEST_ENTITY_3))), + Map.of( + Pair.of(ChangeType.UPSERT, TEST_ENTITY_1), 403, + Pair.of(ChangeType.UPSERT, TEST_ENTITY_2), 403, + Pair.of(ChangeType.UPSERT, TEST_ENTITY_3), 403), + "Expected edit on entities 1-3 to be denied"); + + assertEquals( + AuthUtil.isAPIAuthorizedUrns( + TEST_AUTH_B, + mockAuthorizer, + ENTITY, + List.of( + Pair.of(ChangeType.DELETE, TEST_ENTITY_1), + Pair.of(ChangeType.DELETE, TEST_ENTITY_2), + Pair.of(ChangeType.DELETE, TEST_ENTITY_3))), + Map.of( + Pair.of(ChangeType.DELETE, TEST_ENTITY_1), 403, + Pair.of(ChangeType.DELETE, TEST_ENTITY_2), 403, + Pair.of(ChangeType.DELETE, TEST_ENTITY_3), 403), + "Expected deny on delete for all entities"); + } + + @Test + public void testReadInheritance() { + assertTrue( + AuthUtil.lookupAPIPrivilege(ApiGroup.ENTITY, ApiOperation.READ, "dataset") + .containsAll(API_PRIVILEGE_MAP.get(ENTITY).get(READ)), + "Expected most privileges to imply VIEW"); + } + + @Test + public void testManageConjoin() { + assertTrue( + AuthUtil.lookupAPIPrivilege(ApiGroup.ENTITY, ApiOperation.MANAGE, "dataset") + .contains( + Conjunctive.of( + API_PRIVILEGE_MAP.get(ENTITY).get(ApiOperation.UPDATE).get(0).get(0), + API_PRIVILEGE_MAP.get(ENTITY).get(ApiOperation.DELETE).get(0).get(0))), + "Expected MANAGE to require both EDIT and DELETE"); + } + + @Test + public void testEntityType() { + assertTrue( + AuthUtil.lookupEntityAPIPrivilege(ApiOperation.MANAGE, "dataset") + .contains( + Conjunctive.of( + API_PRIVILEGE_MAP.get(ENTITY).get(ApiOperation.UPDATE).get(0).get(0), + API_PRIVILEGE_MAP.get(ENTITY).get(ApiOperation.DELETE).get(0).get(0))), + "Expected MANAGE on dataset to require both EDIT and DELETE"); + + assertTrue( + AuthUtil.lookupEntityAPIPrivilege(ApiOperation.MANAGE, "dataHubPolicy") + .contains( + Conjunctive.of( + API_ENTITY_PRIVILEGE_MAP + .get("dataHubPolicy") + .get(ApiOperation.UPDATE) + .get(0) + .get(0))), + "Expected MANAGE permission directly on dataHubPolicy entity"); + } + + private Authorizer mockAuthorizer(Map>> allowActorPrivUrn) { + Authorizer authorizer = mock(Authorizer.class); + when(authorizer.authorize(any())) + .thenAnswer( + args -> { + 
AuthorizationRequest req = args.getArgument(0); + String actorUrn = req.getActorUrn(); + String priv = req.getPrivilege(); + + if (!allowActorPrivUrn.containsKey(actorUrn)) { + return new AuthorizationResult( + req, AuthorizationResult.Type.DENY, String.format("Actor %s denied", actorUrn)); + } + + Map> privMap = allowActorPrivUrn.get(actorUrn); + if (!privMap.containsKey(priv)) { + return new AuthorizationResult( + req, AuthorizationResult.Type.DENY, String.format("Privilege %s denied", priv)); + } + + if (req.getResourceSpec().isPresent()) { + Urn entityUrn = UrnUtils.getUrn(req.getResourceSpec().get().getEntity()); + Set resources = privMap.get(priv); + if (!resources.contains(entityUrn)) { + return new AuthorizationResult( + req, + AuthorizationResult.Type.DENY, + String.format("Entity %s denied", entityUrn)); + } + } + + return new AuthorizationResult(req, AuthorizationResult.Type.ALLOW, "Allowed"); + }); + return authorizer; + } +} diff --git a/metadata-integration/java/datahub-client/src/main/resources/MetadataChangeProposal.avsc b/metadata-integration/java/datahub-client/src/main/resources/MetadataChangeProposal.avsc index 64216636af26d..12ace42f3af27 100644 --- a/metadata-integration/java/datahub-client/src/main/resources/MetadataChangeProposal.avsc +++ b/metadata-integration/java/datahub-client/src/main/resources/MetadataChangeProposal.avsc @@ -101,11 +101,12 @@ "name" : "ChangeType", "namespace" : "com.linkedin.pegasus2avro.events.metadata", "doc" : "Descriptor for a change action", - "symbols" : [ "UPSERT", "CREATE", "UPDATE", "DELETE", "PATCH", "RESTATE" ], + "symbols" : [ "UPSERT", "CREATE", "UPDATE", "DELETE", "PATCH", "RESTATE", "CREATE_ENTITY" ], "symbolDocs" : { - "CREATE" : "NOT SUPPORTED YET\ninsert if not exists. otherwise fail", - "DELETE" : "NOT SUPPORTED YET\ndelete action", - "PATCH" : "NOT SUPPORTED YET\npatch the changes instead of full replace", + "CREATE" : "insert if not exists. otherwise fail", + "CREATE_ENTITY" : "insert if entity not exists. otherwise fail", + "DELETE" : "delete action", + "PATCH" : "patch the changes instead of full replace", "RESTATE" : "Restate an aspect, eg. in a index refresh.", "UPDATE" : "NOT SUPPORTED YET\nupdate if exists. otherwise fail", "UPSERT" : "insert if not exists. otherwise update" diff --git a/metadata-integration/java/datahub-event/src/main/resources/MetadataChangeProposal.avsc b/metadata-integration/java/datahub-event/src/main/resources/MetadataChangeProposal.avsc index 64216636af26d..12ace42f3af27 100644 --- a/metadata-integration/java/datahub-event/src/main/resources/MetadataChangeProposal.avsc +++ b/metadata-integration/java/datahub-event/src/main/resources/MetadataChangeProposal.avsc @@ -101,11 +101,12 @@ "name" : "ChangeType", "namespace" : "com.linkedin.pegasus2avro.events.metadata", "doc" : "Descriptor for a change action", - "symbols" : [ "UPSERT", "CREATE", "UPDATE", "DELETE", "PATCH", "RESTATE" ], + "symbols" : [ "UPSERT", "CREATE", "UPDATE", "DELETE", "PATCH", "RESTATE", "CREATE_ENTITY" ], "symbolDocs" : { - "CREATE" : "NOT SUPPORTED YET\ninsert if not exists. otherwise fail", - "DELETE" : "NOT SUPPORTED YET\ndelete action", - "PATCH" : "NOT SUPPORTED YET\npatch the changes instead of full replace", + "CREATE" : "insert if not exists. otherwise fail", + "CREATE_ENTITY" : "insert if entity not exists. otherwise fail", + "DELETE" : "delete action", + "PATCH" : "patch the changes instead of full replace", "RESTATE" : "Restate an aspect, eg. in a index refresh.", "UPDATE" : "NOT SUPPORTED YET\nupdate if exists. 
otherwise fail", "UPSERT" : "insert if not exists. otherwise update" diff --git a/metadata-io/build.gradle b/metadata-io/build.gradle index e07a6ee55c21f..532395f158c02 100644 --- a/metadata-io/build.gradle +++ b/metadata-io/build.gradle @@ -15,7 +15,7 @@ dependencies { api project(':metadata-events:mxe-registration') api project(':metadata-events:mxe-utils-avro') api project(':metadata-models') - api project(':metadata-service:restli-client') + api project(':metadata-service:restli-client-api') api project(':metadata-service:configuration') api project(':metadata-service:services') api project(':metadata-operation-context') @@ -62,6 +62,7 @@ dependencies { testImplementation project(path: ':test-models', configuration: 'testDataTemplate') testImplementation project(':datahub-graphql-core') testImplementation project(path: ':metadata-integration:java:datahub-client', configuration: 'shadow') + testImplementation project(':metadata-service:auth-impl') testImplementation externalDependency.testng testImplementation externalDependency.h2 testImplementation externalDependency.mysqlConnector diff --git a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java index 544afc32a52e7..f125164a219d8 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtil.java @@ -22,6 +22,7 @@ import com.linkedin.metadata.aspect.batch.MCPItem; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.DataPlatformInstanceUtils; @@ -29,6 +30,7 @@ import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; +import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; @@ -47,13 +49,47 @@ public class DefaultAspectsUtil { private DefaultAspectsUtil() {} public static final Set SUPPORTED_TYPES = - Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.PATCH); + Set.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.CREATE_ENTITY, ChangeType.PATCH); + + private static boolean keyAspectExcludeFilter(BatchItem item) { + return !item.getEntitySpec().getKeyAspectName().equals(item.getAspectName()); + } + + public static AspectsBatch withAdditionalChanges( + @Nonnull final AspectsBatch inputBatch, + @Nonnull EntityService entityService, + boolean enableBrowseV2) { + /* + * 1. When deadlock occurs within the transaction the default entity key may need to be removed. This cannot happen + * if the batch is fixed with a key aspect prior to the transaction. + * 2. The CreateIfNotExists validator makes decisions based on the presence of the key aspect which + * is based on a database read. We cannot allow manual key aspects to trick the validator into + * thinking the entity doesn't exist. This optimization avoids having to perform yet another read. + * 3. 
Technically key aspects should always be a CREATE_ENTITY if not exist operation preserving accurate entity creation timestamps + * Removing provided key aspect from input batch, will be replaced with default key aspect if needed based on + * whether it exists in the database. + */ + List result = + inputBatch.getItems().stream() + .filter(DefaultAspectsUtil::keyAspectExcludeFilter) + .collect(Collectors.toCollection(LinkedList::new)); + // Key aspect restored if needed + result.addAll( + DefaultAspectsUtil.getAdditionalChanges( + inputBatch.getMCPItems(), entityService, enableBrowseV2)); + return AspectsBatchImpl.builder() + .aspectRetriever(inputBatch.getAspectRetriever()) + .items(result) + .build(); + } public static List getAdditionalChanges( - @Nonnull AspectsBatch batch, @Nonnull EntityService entityService, boolean browsePathV2) { + @Nonnull Collection batch, + @Nonnull EntityService entityService, + boolean browsePathV2) { Map> itemsByUrn = - batch.getMCPItems().stream() + batch.stream() .filter(item -> SUPPORTED_TYPES.contains(item.getChangeType())) .collect(Collectors.groupingBy(BatchItem::getUrn)); @@ -86,7 +122,7 @@ public static List getAdditionalChanges( .map( entry -> ChangeItemImpl.ChangeItemImplBuilder.build( - getProposalFromAspect( + getProposalFromAspectForDefault( entry.getKey(), entry.getValue(), entityKeyAspect, templateItem), templateItem.getAuditStamp(), entityService)) @@ -102,7 +138,7 @@ public static List getAdditionalChanges( * @param urn entity urn * @return a list of aspect name/aspect pairs to be written */ - public static List> generateDefaultAspects( + private static List> generateDefaultAspects( @Nonnull EntityService entityService, @Nonnull final Urn urn, @Nonnull Set currentBatchAspectNames, @@ -276,7 +312,7 @@ private static DataPlatformInfo getDataPlatformInfo(Urn urn, EntityService en return null; } - private static MetadataChangeProposal getProposalFromAspect( + private static MetadataChangeProposal getProposalFromAspectForDefault( String aspectName, RecordTemplate aspect, RecordTemplate entityKeyAspect, @@ -287,15 +323,10 @@ private static MetadataChangeProposal getProposalFromAspect( // Set net new fields proposal.setAspect(genericAspect); proposal.setAspectName(aspectName); + // already checked existence, default aspects should be changeType CREATE + proposal.setChangeType(ChangeType.CREATE); // Set fields determined from original - // Additional changes should never be set as PATCH, if a PATCH is coming across it should be an - // UPSERT - proposal.setChangeType(templateItem.getChangeType()); - if (ChangeType.PATCH.equals(proposal.getChangeType())) { - proposal.setChangeType(ChangeType.UPSERT); - } - if (templateItem.getSystemMetadata() != null) { proposal.setSystemMetadata(templateItem.getSystemMetadata()); } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java index c261d7fefd411..eacc858f48980 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/client/JavaEntityClient.java @@ -31,6 +31,7 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; +import com.linkedin.metadata.entity.validation.ValidationUtils; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.graph.LineageDirection; import 
com.linkedin.metadata.query.AutoCompleteResult; @@ -47,7 +48,6 @@ import com.linkedin.metadata.search.SearchService; import com.linkedin.metadata.search.client.CachingEntitySearchService; import com.linkedin.metadata.service.RollbackService; -import com.linkedin.metadata.shared.ValidationUtils; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.MetadataChangeProposal; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java index ed3a78ceddba4..61d48c72f4341 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/EntityServiceImpl.java @@ -17,6 +17,7 @@ import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Iterables; import com.google.common.collect.Iterators; import com.google.common.collect.Streams; import com.linkedin.common.AuditStamp; @@ -65,6 +66,7 @@ import com.linkedin.metadata.query.ListUrnsResult; import com.linkedin.metadata.run.AspectRowSummary; import com.linkedin.metadata.snapshot.Snapshot; +import com.linkedin.metadata.utils.AuditStampUtils; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; @@ -625,16 +627,10 @@ public List ingestAspects( @Override public List ingestAspects( @Nonnull final AspectsBatch aspectsBatch, boolean emitMCL, boolean overwrite) { - Set items = new HashSet<>(aspectsBatch.getItems()); - - // Generate additional items as needed - items.addAll(DefaultAspectsUtil.getAdditionalChanges(aspectsBatch, this, enableBrowseV2)); - AspectsBatch withDefaults = - AspectsBatchImpl.builder().aspectRetriever(this).items(items).build(); Timer.Context ingestToLocalDBTimer = MetricUtils.timer(this.getClass(), "ingestAspectsToLocalDB").time(); - List ingestResults = ingestAspectsToLocalDB(withDefaults, overwrite); + List ingestResults = ingestAspectsToLocalDB(aspectsBatch, overwrite); List mclResults = emitMCL(ingestResults, emitMCL); ingestToLocalDBTimer.stop(); @@ -646,24 +642,28 @@ public List ingestAspects( * an update, push the new version into the local DB. Otherwise, do not push the new version, but * just update the system metadata. 
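// Editor's note: a minimal, hypothetical sketch of the upsert rule this javadoc describes --
// a changed aspect value is pushed as a new version, while an unchanged value only refreshes
// system metadata. StoredAspect, IncomingAspect and IngestDecision are illustrative names, not
// DataHub classes; the real path also diffs system metadata and runs inside a retried transaction.
import java.util.Objects;

class UpsertDecisionSketch {
  enum IngestDecision { NEW_VERSION, METADATA_ONLY }

  record StoredAspect(String value, long version) {}

  record IncomingAspect(String value) {}

  // Only a changed value bumps the version; an identical value falls through to a metadata-only update.
  static IngestDecision decide(StoredAspect latest, IncomingAspect incoming) {
    if (latest == null || !Objects.equals(latest.value(), incoming.value())) {
      return IngestDecision.NEW_VERSION;
    }
    return IngestDecision.METADATA_ONLY;
  }

  public static void main(String[] args) {
    StoredAspect latest = new StoredAspect("{\"removed\":false}", 3);
    System.out.println(decide(latest, new IncomingAspect("{\"removed\":true}")));  // NEW_VERSION
    System.out.println(decide(latest, new IncomingAspect("{\"removed\":false}"))); // METADATA_ONLY
  }
}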
* - * @param aspectsBatch Collection of the following: an urn associated with the new aspect, name of + * @param inputBatch Collection of the following: an urn associated with the new aspect, name of * the aspect being inserted, and a function to apply to the latest version of the aspect to * get the updated version * @return Details about the new and old version of the aspect */ @Nonnull private List ingestAspectsToLocalDB( - @Nonnull final AspectsBatch aspectsBatch, boolean overwrite) { + @Nonnull final AspectsBatch inputBatch, boolean overwrite) { - if (aspectsBatch.containsDuplicateAspects()) { - log.warn(String.format("Batch contains duplicates: %s", aspectsBatch)); + if (inputBatch.containsDuplicateAspects()) { + log.warn(String.format("Batch contains duplicates: %s", inputBatch)); } return aspectDao .runInTransactionWithRetry( (tx) -> { + // Generate default aspects within the transaction (they are re-calculated on retry) + AspectsBatch batchWithDefaults = + DefaultAspectsUtil.withAdditionalChanges(inputBatch, this, enableBrowseV2); + // Read before write is unfortunate, however batch it - final Map> urnAspects = aspectsBatch.getUrnAspectsMap(); + final Map> urnAspects = batchWithDefaults.getUrnAspectsMap(); // read #1 final Map> latestAspects = EntityUtils.toSystemAspects(aspectDao.getLatestAspects(urnAspects), this); @@ -674,7 +674,7 @@ private List ingestAspectsToLocalDB( // 1. Convert patches to full upserts // 2. Run any entity/aspect level hooks Pair>, List> updatedItems = - aspectsBatch.toUpsertBatchItems(latestAspects); + batchWithDefaults.toUpsertBatchItems(latestAspects); // Fetch additional information if needed final Map> updatedLatestAspects; @@ -816,7 +816,7 @@ private List ingestAspectsToLocalDB( return upsertResults; }, - aspectsBatch, + inputBatch, DEFAULT_MAX_TRANSACTION_RETRY) .stream() .flatMap(List::stream) @@ -973,18 +973,26 @@ private Stream ingestTimeseriesProposal( if (!async) { // Create default non-timeseries aspects for timeseries aspects - List timeseriesItems = - aspectsBatch.getItems().stream() + List timeseriesKeyAspects = + aspectsBatch.getMCPItems().stream() .filter(item -> item.getAspectSpec().isTimeseries()) + .map( + item -> + ChangeItemImpl.builder() + .urn(item.getUrn()) + .aspectName(item.getEntitySpec().getKeyAspectName()) + .changeType(ChangeType.UPSERT) + .entitySpec(item.getEntitySpec()) + .aspectSpec(item.getEntitySpec().getKeyAspectSpec()) + .auditStamp(item.getAuditStamp()) + .systemMetadata(item.getSystemMetadata()) + .recordTemplate( + EntityUtils.buildKeyAspect(getEntityRegistry(), item.getUrn())) + .build(this)) .collect(Collectors.toList()); - List defaultAspects = - DefaultAspectsUtil.getAdditionalChanges( - AspectsBatchImpl.builder().aspectRetriever(this).items(timeseriesItems).build(), - this, - enableBrowseV2); ingestProposalSync( - AspectsBatchImpl.builder().aspectRetriever(this).items(defaultAspects).build()); + AspectsBatchImpl.builder().aspectRetriever(this).items(timeseriesKeyAspects).build()); } // Emit timeseries MCLs @@ -1087,10 +1095,7 @@ private Stream ingestProposalSync(AspectsBatch aspectsBatch) { List unsupported = nonTimeseries.getMCPItems().stream() - .filter( - item -> - item.getMetadataChangeProposal().getChangeType() != ChangeType.PATCH - && item.getMetadataChangeProposal().getChangeType() != ChangeType.UPSERT) + .filter(item -> !MCPItem.isValidChangeType(item.getChangeType(), item.getAspectSpec())) .collect(Collectors.toList()); if (!unsupported.isEmpty()) { throw new UnsupportedOperationException( @@ -1166,9 
+1171,7 @@ public Integer getCountAspect(@Nonnull String aspectName, @Nullable String urnLi @Override public RestoreIndicesResult restoreIndices( @Nonnull RestoreIndicesArgs args, @Nonnull Consumer logger) { - RestoreIndicesResult result = new RestoreIndicesResult(); - int ignored = 0; - int rowsMigrated = 0; + logger.accept(String.format("Args are %s", args)); logger.accept( String.format( @@ -1176,19 +1179,84 @@ public RestoreIndicesResult restoreIndices( args.start, args.start + args.batchSize)); long startTime = System.currentTimeMillis(); PagedList rows = aspectDao.getPagedAspects(args); - result.timeSqlQueryMs = System.currentTimeMillis() - startTime; - startTime = System.currentTimeMillis(); + long timeSqlQueryMs = System.currentTimeMillis() - startTime; + logger.accept( String.format( "Reading rows %s through %s from the aspects table completed.", args.start, args.start + args.batchSize)); - LinkedList> futures = new LinkedList<>(); - List systemAspects = EntityUtils.toSystemAspectFromEbeanAspects( rows != null ? rows.getList() : List.of(), this); + RestoreIndicesResult result = restoreIndices(systemAspects, logger); + + try { + TimeUnit.MILLISECONDS.sleep(args.batchDelayMs); + } catch (InterruptedException e) { + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); + } + + result.timeSqlQueryMs = timeSqlQueryMs; + return result; + } + + @Nonnull + @Override + public List restoreIndices( + @Nonnull Set urns, + @Nullable Set inputAspectNames, + @Nullable Integer inputBatchSize) + throws RemoteInvocationException, URISyntaxException { + int batchSize = inputBatchSize != null ? inputBatchSize : 100; + + List results = new LinkedList<>(); + + for (List urnBatch : Iterables.partition(urns, batchSize)) { + + Map> byEntityType = + urnBatch.stream().collect(Collectors.groupingBy(Urn::getEntityType, Collectors.toSet())); + + for (Map.Entry> entityBatch : byEntityType.entrySet()) { + Set aspectNames = + inputAspectNames != null + ? inputAspectNames + : getEntityAspectNames(entityBatch.getKey()); + + long startTime = System.currentTimeMillis(); + List systemAspects = + EntityUtils.toSystemAspects( + getLatestAspect(entityBatch.getValue(), aspectNames).values(), this); + long timeSqlQueryMs = System.currentTimeMillis() - startTime; + + RestoreIndicesResult result = restoreIndices(systemAspects, s -> {}); + result.timeSqlQueryMs = timeSqlQueryMs; + results.add(result); + } + } + + return results; + } + + /** + * Interface designed to maintain backwards compatibility + * + * @param systemAspects + * @param logger + * @return + */ + private RestoreIndicesResult restoreIndices( + List systemAspects, @Nonnull Consumer logger) { + RestoreIndicesResult result = new RestoreIndicesResult(); + long startTime = System.currentTimeMillis(); + int ignored = 0; + int rowsMigrated = 0; + long defaultAspectsCreated = 0; + + LinkedList> futures = new LinkedList<>(); + for (SystemAspect aspect : systemAspects) { // 1. Extract an Entity type from the entity Urn result.timeGetRowMs = System.currentTimeMillis() - startTime; @@ -1265,6 +1333,7 @@ public RestoreIndicesResult restoreIndices( latestSystemMetadata.setProperties(properties); // 5. 
Produce MAE events for the aspect record + AuditStamp auditStamp = AuditStampUtils.createDefaultAuditStamp(); futures.add( alwaysProduceMCLAsync( urn, @@ -1275,11 +1344,28 @@ public RestoreIndicesResult restoreIndices( aspectRecord, null, latestSystemMetadata, - new AuditStamp() - .setActor(UrnUtils.getUrn(SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()), + auditStamp, ChangeType.RESTATE) .getFirst()); + + // 6. Ensure default aspects are in existence in SQL + List keyAspect = + List.of( + ChangeItemImpl.builder() + .urn(urn) + .aspectName(entitySpec.getKeyAspectName()) + .changeType(ChangeType.UPSERT) + .entitySpec(entitySpec) + .aspectSpec(entitySpec.getKeyAspectSpec()) + .auditStamp(auditStamp) + .systemMetadata(latestSystemMetadata) + .recordTemplate(EntityUtils.buildKeyAspect(getEntityRegistry(), urn)) + .build(this)); + Stream defaultAspectsResult = + ingestProposalSync( + AspectsBatchImpl.builder().aspectRetriever(this).items(keyAspect).build()); + defaultAspectsCreated += defaultAspectsResult.count(); + result.sendMessageMs += System.currentTimeMillis() - startTime; rowsMigrated++; @@ -1294,14 +1380,10 @@ public RestoreIndicesResult restoreIndices( throw new RuntimeException(e); } }); - try { - TimeUnit.MILLISECONDS.sleep(args.batchDelayMs); - } catch (InterruptedException e) { - throw new RuntimeException( - "Thread interrupted while sleeping after successful batch migration."); - } + result.ignored = ignored; result.rowsMigrated = rowsMigrated; + result.defaultAspectsCreated = defaultAspectsCreated; return result; } @@ -1554,13 +1636,7 @@ private void ingestSnapshotUnion( final List> aspectRecordsToIngest = NewModelUtils.getAspectsFromSnapshot(snapshotRecord); - log.info("INGEST urn {} with system metadata {}", urn.toString(), systemMetadata.toString()); - aspectRecordsToIngest.addAll( - DefaultAspectsUtil.generateDefaultAspects( - this, - urn, - aspectRecordsToIngest.stream().map(Pair::getFirst).collect(Collectors.toSet()), - enableBrowseV2)); + log.info("INGEST urn {} with system metadata {}", urn, systemMetadata.toString()); AspectsBatchImpl aspectsBatch = AspectsBatchImpl.builder() diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java index aa60e7e528673..90e8ff457de3d 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/AspectsBatchImpl.java @@ -34,6 +34,8 @@ public class AspectsBatchImpl implements AspectsBatch { /** * Convert patches to upserts, apply hooks at the aspect and batch level. * + *

Filter CREATE if not exists + * * @param latestAspects latest version in the database * @return The new urn/aspectnames and the uniform upserts, possibly expanded/mutated by the * various hooks diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/ChangeItemImpl.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/ChangeItemImpl.java index b2e3363547dd0..fd539d086490f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/ChangeItemImpl.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/ebean/batch/ChangeItemImpl.java @@ -38,7 +38,6 @@ @Getter @Builder(toBuilder = true) public class ChangeItemImpl implements ChangeMCP { - public static ChangeItemImpl fromPatch( @Nonnull Urn urn, @Nonnull AspectSpec aspectSpec, @@ -61,6 +60,9 @@ public static ChangeItemImpl fromPatch( return builder.build(aspectRetriever); } + // type of change + @Nonnull private final ChangeType changeType; + // urn an urn associated with the new aspect @Nonnull private final Urn urn; @@ -82,12 +84,6 @@ public static ChangeItemImpl fromPatch( @Setter @Nullable private SystemAspect previousSystemAspect; @Setter private long nextAspectVersion; - @Nonnull - @Override - public ChangeType getChangeType() { - return ChangeType.UPSERT; - } - @Nonnull @Override public SystemAspect getSystemAspect(@Nullable Long version) { @@ -117,6 +113,9 @@ public ChangeItemImplBuilder systemMetadata(SystemMetadata systemMetadata) { @SneakyThrows public ChangeItemImpl build(AspectRetriever aspectRetriever) { + // Apply change type default + this.changeType = validateOrDefaultChangeType(changeType); + ValidationUtils.validateUrn(aspectRetriever.getEntityRegistry(), this.urn); log.debug("entity type = {}", this.urn.getEntityType()); @@ -130,6 +129,7 @@ public ChangeItemImpl build(AspectRetriever aspectRetriever) { this.entitySpec, this.urn, this.recordTemplate, aspectRetriever); return new ChangeItemImpl( + this.changeType, this.urn, this.aspectName, this.recordTemplate, @@ -144,10 +144,6 @@ public ChangeItemImpl build(AspectRetriever aspectRetriever) { public static ChangeItemImpl build( MetadataChangeProposal mcp, AuditStamp auditStamp, AspectRetriever aspectRetriever) { - if (!mcp.getChangeType().equals(ChangeType.UPSERT)) { - throw new IllegalArgumentException( - "Invalid MCP, this class only supports change type of UPSERT."); - } log.debug("entity type = {}", mcp.getEntityType()); EntitySpec entitySpec = @@ -168,6 +164,7 @@ public static ChangeItemImpl build( } return ChangeItemImpl.builder() + .changeType(mcp.getChangeType()) .urn(urn) .aspectName(mcp.getAspectName()) .systemMetadata( @@ -178,6 +175,16 @@ public static ChangeItemImpl build( .build(aspectRetriever); } + // specific to impl, other impls support PATCH, etc + private static ChangeType validateOrDefaultChangeType(@Nullable ChangeType changeType) { + final ChangeType finalChangeType = changeType == null ? 
ChangeType.UPSERT : changeType; + if (!CHANGE_TYPES.contains(finalChangeType)) { + throw new IllegalArgumentException( + String.format("ChangeType %s not in %s", changeType, CHANGE_TYPES)); + } + return finalChangeType; + } + private static RecordTemplate convertToRecordTemplate( MetadataChangeProposal mcp, AspectSpec aspectSpec) { RecordTemplate aspect; @@ -218,16 +225,18 @@ public int hashCode() { @Override public String toString() { - return "UpsertBatchItem{" - + "urn=" + return "ChangeItemImpl{" + + "changeType=" + + changeType + + ", urn=" + urn + ", aspectName='" + aspectName + '\'' - + ", systemMetadata=" - + systemMetadata + ", recordTemplate=" + recordTemplate + + ", systemMetadata=" + + systemMetadata + '}'; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java index 16942a02b0e4a..083657a700912 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/entity/validation/ValidationUtils.java @@ -1,16 +1,41 @@ package com.linkedin.metadata.entity.validation; +import com.codahale.metrics.Timer; +import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.schema.validation.ValidationResult; +import com.linkedin.data.template.AbstractArrayTemplate; import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.aspect.AspectRetriever; +import com.linkedin.metadata.browse.BrowseResult; +import com.linkedin.metadata.browse.BrowseResultEntity; +import com.linkedin.metadata.browse.BrowseResultEntityArray; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; +import com.linkedin.metadata.graph.EntityLineageResult; +import com.linkedin.metadata.graph.LineageRelationship; +import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.query.ListResult; +import com.linkedin.metadata.search.LineageScrollResult; +import com.linkedin.metadata.search.LineageSearchEntity; +import com.linkedin.metadata.search.LineageSearchEntityArray; +import com.linkedin.metadata.search.LineageSearchResult; +import com.linkedin.metadata.search.ScrollResult; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.utils.metrics.MetricUtils; import java.net.URISyntaxException; import java.net.URLEncoder; +import java.util.Objects; +import java.util.Set; import java.util.function.Consumer; +import java.util.function.Function; +import java.util.stream.Collectors; +import java.util.stream.Stream; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; @@ -123,5 +148,242 @@ public static void validateUrn(@Nonnull EntityRegistry entityRegistry, @Nonnull } } + public static SearchResult validateSearchResult( + final SearchResult searchResult, @Nonnull final EntityService entityService) { + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateSearchResult").time()) { + if (searchResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + SearchResult 
validatedSearchResult = + new SearchResult() + .setFrom(searchResult.getFrom()) + .setMetadata(searchResult.getMetadata()) + .setPageSize(searchResult.getPageSize()) + .setNumEntities(searchResult.getNumEntities()); + + SearchEntityArray validatedEntities = + validateSearchUrns( + searchResult.getEntities(), SearchEntity::getEntity, entityService, true, true) + .collect(Collectors.toCollection(SearchEntityArray::new)); + validatedSearchResult.setEntities(validatedEntities); + + return validatedSearchResult; + } + } + + public static ScrollResult validateScrollResult( + final ScrollResult scrollResult, @Nonnull final EntityService entityService) { + if (scrollResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + ScrollResult validatedScrollResult = + new ScrollResult() + .setMetadata(scrollResult.getMetadata()) + .setPageSize(scrollResult.getPageSize()) + .setNumEntities(scrollResult.getNumEntities()); + if (scrollResult.getScrollId() != null) { + validatedScrollResult.setScrollId(scrollResult.getScrollId()); + } + + SearchEntityArray validatedEntities = + validateSearchUrns( + scrollResult.getEntities(), SearchEntity::getEntity, entityService, true, true) + .collect(Collectors.toCollection(SearchEntityArray::new)); + + validatedScrollResult.setEntities(validatedEntities); + + return validatedScrollResult; + } + + public static BrowseResult validateBrowseResult( + final BrowseResult browseResult, @Nonnull final EntityService entityService) { + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateBrowseResult").time()) { + if (browseResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + BrowseResult validatedBrowseResult = + new BrowseResult() + .setGroups(browseResult.getGroups()) + .setMetadata(browseResult.getMetadata()) + .setFrom(browseResult.getFrom()) + .setPageSize(browseResult.getPageSize()) + .setNumGroups(browseResult.getNumGroups()) + .setNumEntities(browseResult.getNumEntities()) + .setNumElements(browseResult.getNumElements()); + + BrowseResultEntityArray validatedEntities = + validateSearchUrns( + browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true, true) + .collect(Collectors.toCollection(BrowseResultEntityArray::new)); + validatedBrowseResult.setEntities(validatedEntities); + + return validatedBrowseResult; + } + } + + public static ListResult validateListResult( + final ListResult listResult, @Nonnull final EntityService entityService) { + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateListResult").time()) { + if (listResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + ListResult validatedListResult = + new ListResult() + .setStart(listResult.getStart()) + .setCount(listResult.getCount()) + .setTotal(listResult.getTotal()); + + UrnArray validatedEntities = + validateSearchUrns( + listResult.getEntities(), Function.identity(), entityService, true, true) + .collect(Collectors.toCollection(UrnArray::new)); + validatedListResult.setEntities(validatedEntities); + + return validatedListResult; + } + } + + public static LineageSearchResult validateLineageSearchResult( + final LineageSearchResult lineageSearchResult, + @Nonnull final EntityService entityService) { + try (Timer.Context ignored = + MetricUtils.timer(ValidationUtils.class, "validateLineageResult").time()) { + if (lineageSearchResult == null) { + 
return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + LineageSearchResult validatedLineageSearchResult = + new LineageSearchResult() + .setMetadata(lineageSearchResult.getMetadata()) + .setFrom(lineageSearchResult.getFrom()) + .setPageSize(lineageSearchResult.getPageSize()) + .setNumEntities(lineageSearchResult.getNumEntities()); + + LineageSearchEntityArray validatedEntities = + validateSearchUrns( + lineageSearchResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true, + true) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + validatedLineageSearchResult.setEntities(validatedEntities); + + log.debug("Returning validated lineage search results"); + return validatedLineageSearchResult; + } + } + + public static EntityLineageResult validateEntityLineageResult( + @Nullable final EntityLineageResult entityLineageResult, + @Nonnull final EntityService entityService) { + if (entityLineageResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + final EntityLineageResult validatedEntityLineageResult = + new EntityLineageResult() + .setStart(entityLineageResult.getStart()) + .setCount(entityLineageResult.getCount()) + .setTotal(entityLineageResult.getTotal()); + + LineageRelationshipArray validatedRelationships = + validateSearchUrns( + entityLineageResult.getRelationships(), + LineageRelationship::getEntity, + entityService, + true, + false) + .collect(Collectors.toCollection(LineageRelationshipArray::new)); + + validatedEntityLineageResult.setFiltered( + (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null + ? entityLineageResult.getFiltered() + : 0) + + entityLineageResult.getRelationships().size() + - validatedRelationships.size()); + validatedEntityLineageResult.setRelationships(validatedRelationships); + + return validatedEntityLineageResult; + } + + public static LineageScrollResult validateLineageScrollResult( + final LineageScrollResult lineageScrollResult, + @Nonnull final EntityService entityService) { + if (lineageScrollResult == null) { + return null; + } + Objects.requireNonNull(entityService, "entityService must not be null"); + + LineageScrollResult validatedLineageScrollResult = + new LineageScrollResult() + .setMetadata(lineageScrollResult.getMetadata()) + .setPageSize(lineageScrollResult.getPageSize()) + .setNumEntities(lineageScrollResult.getNumEntities()); + if (lineageScrollResult.getScrollId() != null) { + validatedLineageScrollResult.setScrollId(lineageScrollResult.getScrollId()); + } + + LineageSearchEntityArray validatedEntities = + validateSearchUrns( + lineageScrollResult.getEntities(), + LineageSearchEntity::getEntity, + entityService, + true, + true) + .collect(Collectors.toCollection(LineageSearchEntityArray::new)); + + validatedLineageScrollResult.setEntities(validatedEntities); + + return validatedLineageScrollResult; + } + + private static Stream validateSearchUrns( + final AbstractArrayTemplate array, + Function urnFunction, + @Nonnull final EntityService entityService, + boolean enforceSQLExistence, + boolean includeSoftDeleted) { + + if (enforceSQLExistence) { + Set existingUrns = + entityService.exists( + array.stream().map(urnFunction).collect(Collectors.toList()), includeSoftDeleted); + return array.stream().filter(item -> existingUrns.contains(urnFunction.apply(item))); + } else { + Set validatedUrns = + array.stream() + .map(urnFunction) + .filter( + urn -> { + try { + 
validateUrn(entityService.getEntityRegistry(), urn); + return true; + } catch (Exception e) { + log.warn( + "Excluded {} from search result due to {}", + urn.toString(), + e.getMessage()); + } + return false; + }) + .collect(Collectors.toSet()); + return array.stream().filter(item -> validatedUrns.contains(urnFunction.apply(item))); + } + } + private ValidationUtils() {} } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java index bdf405fe36c07..a10758e6537d2 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/graph/SiblingGraphService.java @@ -9,7 +9,7 @@ import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.shared.ValidationUtils; +import com.linkedin.metadata.entity.validation.ValidationUtils; import java.util.HashSet; import java.util.List; import java.util.Map; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java index 59109c8c3de64..9368c83a77c91 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/recommendation/candidatesource/MostPopularSource.java @@ -1,7 +1,7 @@ package com.linkedin.metadata.recommendation.candidatesource; import com.codahale.metrics.Timer; -import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.authorization.config.ViewAuthorizationConfiguration; import com.datahub.util.exception.ESQueryException; import com.google.common.collect.ImmutableSet; import com.linkedin.metadata.Constants; @@ -154,8 +154,8 @@ private SearchRequest buildSearchRequest(@Nonnull OperationContext opContext) { // If search access controls enabled, restrict user activity to peers private static Optional restrictPeers(@Nonnull OperationContext opContext) { - SearchAuthorizationConfiguration config = - opContext.getOperationContextConfig().getSearchAuthorizationConfiguration(); + ViewAuthorizationConfiguration config = + opContext.getOperationContextConfig().getViewAuthorizationConfiguration(); if (config.isEnabled() && config.getRecommendations().isPeerGroupEnabled() diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java index 2d82dc4001c78..13fec7ad5a016 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/LineageSearchService.java @@ -1,5 +1,6 @@ package com.linkedin.metadata.search; +import static com.datahub.authorization.AuthUtil.canViewEntity; import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; @@ -551,6 +552,7 @@ private LineageSearchResult getSearchResultInBatches( LineageSearchResult resultForBatch = buildLineageSearchResult( + opContext, _searchService.searchAcrossEntities( opContext.withSearchFlags( flags -> applyDefaultSearchFlags(flags, input, DEFAULT_SERVICE_SEARCH_FLAGS)), @@ -684,7 +686,9 @@ private Filter buildFilter(@Nonnull Set urns, 
@Nullable Filter inputFilters } private LineageSearchResult buildLineageSearchResult( - @Nonnull SearchResult searchResult, Map urnToRelationship) { + @Nonnull OperationContext opContext, + @Nonnull SearchResult searchResult, + Map urnToRelationship) { AggregationMetadataArray aggregations = new AggregationMetadataArray(searchResult.getMetadata().getAggregations()); return new LineageSearchResult() @@ -694,7 +698,9 @@ private LineageSearchResult buildLineageSearchResult( .map( searchEntity -> buildLineageSearchEntity( - searchEntity, urnToRelationship.get(searchEntity.getEntity()))) + opContext, + searchEntity, + urnToRelationship.get(searchEntity.getEntity()))) .collect(Collectors.toList()))) .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) .setFrom(searchResult.getFrom()) @@ -703,10 +709,30 @@ private LineageSearchResult buildLineageSearchResult( } private LineageSearchEntity buildLineageSearchEntity( - @Nonnull SearchEntity searchEntity, @Nullable LineageRelationship lineageRelationship) { + @Nonnull OperationContext opContext, + @Nonnull SearchEntity searchEntity, + @Nullable LineageRelationship lineageRelationship) { LineageSearchEntity entity = new LineageSearchEntity(searchEntity.data()); if (lineageRelationship != null) { - entity.setPaths(lineageRelationship.getPaths()); + entity.setPaths( + lineageRelationship.getPaths().stream() + .filter( + urnArray -> + urnArray.stream() + .allMatch( + urn -> { + if (opContext + .getOperationContextConfig() + .getViewAuthorizationConfiguration() + .isEnabled()) { + return canViewEntity( + opContext.getSessionAuthentication().getActor().toUrnStr(), + opContext.getAuthorizerContext().getAuthorizer(), + urn); + } + return true; + })) + .collect(Collectors.toCollection(UrnArrayArray::new))); entity.setDegree(lineageRelationship.getDegree()); if (lineageRelationship.hasDegrees()) { entity.setDegrees(lineageRelationship.getDegrees()); @@ -835,6 +861,7 @@ private LineageScrollResult getScrollResultInBatches( LineageScrollResult resultForBatch = buildLineageScrollResult( + opContext, _searchService.scrollAcrossEntities( finalOpContext, entitiesToQuery, @@ -858,7 +885,9 @@ private LineageScrollResult getScrollResultInBatches( } private LineageScrollResult buildLineageScrollResult( - @Nonnull ScrollResult scrollResult, Map urnToRelationship) { + @Nonnull OperationContext opContext, + @Nonnull ScrollResult scrollResult, + Map urnToRelationship) { AggregationMetadataArray aggregations = new AggregationMetadataArray(scrollResult.getMetadata().getAggregations()); LineageScrollResult lineageScrollResult = @@ -869,7 +898,9 @@ private LineageScrollResult buildLineageScrollResult( .map( searchEntity -> buildLineageSearchEntity( - searchEntity, urnToRelationship.get(searchEntity.getEntity()))) + opContext, + searchEntity, + urnToRelationship.get(searchEntity.getEntity()))) .collect(Collectors.toList()))) .setMetadata(new SearchResultMetadata().setAggregations(aggregations)) .setPageSize(scrollResult.getPageSize()) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java index 6e5bd63103190..e1a381a8f29d9 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/SearchService.java @@ -40,7 +40,7 @@ public SearchService( public Map docCountPerEntity( @Nonnull OperationContext opContext, @Nonnull List entityNames) { - return entityNames.stream() + 
return getEntitiesToSearch(opContext, entityNames, 0).stream() .collect( Collectors.toMap( Function.identity(), @@ -73,7 +73,7 @@ public SearchResult search( @Nullable SortCriterion sortCriterion, int from, int size) { - List entitiesToSearch = getEntitiesToSearch(opContext, entityNames); + List entitiesToSearch = getEntitiesToSearch(opContext, entityNames, size); if (entitiesToSearch.isEmpty()) { // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); @@ -146,7 +146,7 @@ public SearchResult searchAcrossEntities( facets = new ArrayList<>(facets); facets.add(INDEX_VIRTUAL_FIELD); } - List nonEmptyEntities = getEntitiesToSearch(opContext, entities); + List nonEmptyEntities = getEntitiesToSearch(opContext, entities, size); if (nonEmptyEntities.isEmpty()) { // Optimization: If the indices are all empty, return empty result return getEmptySearchResult(from, size); @@ -205,8 +205,8 @@ public SearchResult searchAcrossEntities( * @param inputEntities the requested entities * @return some entities to search */ - private List getEntitiesToSearch( - @Nonnull OperationContext opContext, @Nonnull List inputEntities) { + public List getEntitiesToSearch( + @Nonnull OperationContext opContext, @Nonnull List inputEntities, int size) { List nonEmptyEntities; List lowercaseEntities = inputEntities.stream().map(String::toLowerCase).collect(Collectors.toList()); @@ -251,7 +251,7 @@ public ScrollResult scrollAcrossEntities( String.format( "Searching Search documents entities: %s, input: %s, postFilters: %s, sortCriterion: %s, from: %s, size: %s", entities, input, postFilters, sortCriterion, scrollId, size)); - List entitiesToSearch = getEntitiesToSearch(opContext, entities); + List entitiesToSearch = getEntitiesToSearch(opContext, entities, size); if (entitiesToSearch.isEmpty()) { // No indices with non-zero entries: skip querying and return empty result return getEmptyScrollResult(size); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java index 1958bed33c92b..0f1b05b3d0b78 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/indexbuilder/MappingsBuilder.java @@ -42,6 +42,8 @@ public static Map getPartialNgramConfigWithOverrides( public static final Map KEYWORD_TYPE_MAP = ImmutableMap.of(TYPE, KEYWORD); + public static final String SYSTEM_CREATED_FIELD = "systemCreated"; + // Subfields public static final String DELIMITED = "delimited"; public static final String LENGTH = "length"; @@ -121,6 +123,7 @@ public static Map getMappings(@Nonnull final EntitySpec entitySp // Fixed fields mappings.put("urn", getMappingsForUrn()); mappings.put("runId", getMappingsForRunId()); + mappings.put(SYSTEM_CREATED_FIELD, getMappingsForSystemCreated()); return ImmutableMap.of(PROPERTIES, mappings); } @@ -147,6 +150,10 @@ private static Map getMappingsForRunId() { return ImmutableMap.builder().put(TYPE, ESUtils.KEYWORD_FIELD_TYPE).build(); } + private static Map getMappingsForSystemCreated() { + return ImmutableMap.builder().put(TYPE, ESUtils.DATE_FIELD_TYPE).build(); + } + public static Map getMappingsForStructuredProperty( Collection properties) { return properties.stream() diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java 
b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java index 3fd1062fb25c5..37c9b4c6df655 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/ESSearchDAO.java @@ -189,7 +189,6 @@ private ScrollResult executeAndExtract( @Nonnull List entitySpecs, @Nonnull SearchRequest searchRequest, @Nullable Filter filter, - @Nullable String scrollId, @Nullable String keepAlive, int size) { try (Timer.Context ignored = @@ -200,13 +199,7 @@ private ScrollResult executeAndExtract( SearchRequestHandler.getBuilder( entitySpecs, searchConfiguration, customSearchConfiguration, aspectRetriever) .extractScrollResult( - opContext, - searchResponse, - filter, - scrollId, - keepAlive, - size, - supportsPointInTime())); + opContext, searchResponse, filter, keepAlive, size, supportsPointInTime())); } catch (Exception e) { log.error("Search query failed: {}", searchRequest, e); throw new ESQueryException("Search query failed:", e); @@ -322,7 +315,7 @@ public AutoCompleteResult autoComplete( limit); req.indices(indexConvention.getIndexName(entitySpec)); SearchResponse searchResponse = client.search(req, RequestOptions.DEFAULT); - return builder.extractResult(searchResponse, query); + return builder.extractResult(opContext, searchResponse, query); } catch (Exception e) { log.error("Auto complete query failed:" + e.getMessage()); throw new ESQueryException("Auto complete query failed:", e); @@ -440,7 +433,7 @@ public ScrollResult scroll( scrollRequestTimer.stop(); return executeAndExtract( - opContext, entitySpecs, searchRequest, transformedFilters, scrollId, keepAlive, size); + opContext, entitySpecs, searchRequest, transformedFilters, keepAlive, size); } private SearchRequest getScrollRequest( diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java index 3e6ce53b7af5c..0acedc5d49171 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/AutocompleteRequestHandler.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.search.elasticsearch.query.request; import static com.linkedin.metadata.models.SearchableFieldSpecExtractor.PRIMARY_URN_SEARCH_PROPERTIES; +import static com.linkedin.metadata.search.utils.ESAccessControlUtil.restrictUrn; import static com.linkedin.metadata.search.utils.ESUtils.applyDefaultSearchFilters; import com.google.common.collect.ImmutableList; @@ -164,9 +165,12 @@ private List getAutocompleteFields(@Nullable String field) { } public AutoCompleteResult extractResult( - @Nonnull SearchResponse searchResponse, @Nonnull String input) { + @Nonnull OperationContext opContext, + @Nonnull SearchResponse searchResponse, + @Nonnull String input) { Set results = new LinkedHashSet<>(); Set entityResults = new HashSet<>(); + for (SearchHit hit : searchResponse.getHits()) { Optional matchedFieldValue = hit.getHighlightFields().entrySet().stream() @@ -175,13 +179,16 @@ public AutoCompleteResult extractResult( Optional matchedUrn = Optional.ofNullable((String) hit.getSourceAsMap().get("urn")); try { if (matchedUrn.isPresent()) { - entityResults.add( - new 
AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); + Urn autoCompleteUrn = Urn.createFromString(matchedUrn.get()); + if (!restrictUrn(opContext, autoCompleteUrn)) { + entityResults.add( + new AutoCompleteEntity().setUrn(Urn.createFromString(matchedUrn.get()))); + matchedFieldValue.ifPresent(results::add); + } } } catch (URISyntaxException e) { throw new RuntimeException(String.format("Failed to create urn %s", matchedUrn.get()), e); } - matchedFieldValue.ifPresent(results::add); } return new AutoCompleteResult() .setQuery(input) diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java index 4c704f81b4c13..27d733ae6d353 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchQueryBuilder.java @@ -124,7 +124,8 @@ private QueryBuilder buildInternalQuery( final BoolQueryBuilder finalQuery = Optional.ofNullable(customQueryConfig) .flatMap(cqc -> boolQueryBuilder(cqc, sanitizedQuery)) - .orElse(QueryBuilders.boolQuery()); + .orElse(QueryBuilders.boolQuery()) + .minimumShouldMatch(1); if (fulltext && !query.startsWith(STRUCTURED_QUERY_PREFIX)) { getSimpleQuery(customQueryConfig, entitySpecs, sanitizedQuery).ifPresent(finalQuery::should); diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java index d95e81b616084..bd4340d16f17c 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/elasticsearch/query/request/SearchRequestHandler.java @@ -37,6 +37,7 @@ import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -70,7 +71,7 @@ public class SearchRequestHandler { new ConcurrentHashMap<>(); private final List entitySpecs; private final Set defaultQueryFieldNames; - private final HighlightBuilder highlights; + @Nonnull private final HighlightBuilder highlights; private final SearchConfiguration configs; private final SearchQueryBuilder searchQueryBuilder; @@ -374,7 +375,7 @@ public SearchResult extractResult( int from, int size) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; - List resultList = getResults(opContext, searchResponse); + Collection resultList = getRestrictedResults(opContext, searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(opContext, searchResponse, filter); @@ -391,12 +392,11 @@ public ScrollResult extractScrollResult( @Nonnull OperationContext opContext, @Nonnull SearchResponse searchResponse, Filter filter, - @Nullable String scrollId, @Nullable String keepAlive, int size, boolean supportsPointInTime) { int totalCount = (int) searchResponse.getHits().getTotalHits().value; - List resultList = getResults(opContext, searchResponse); + Collection resultList = getRestrictedResults(opContext, searchResponse); SearchResultMetadata searchResultMetadata = extractSearchResultMetadata(opContext, searchResponse, filter); SearchHit[] searchHits = 
searchResponse.getHits().getHits(); @@ -493,7 +493,7 @@ private SearchEntity getResult(@Nonnull SearchHit hit) { * @return List of search entities */ @Nonnull - private List getResults( + private Collection getRestrictedResults( @Nonnull OperationContext opContext, @Nonnull SearchResponse searchResponse) { return ESAccessControlUtil.restrictSearchResult( opContext, diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java index a291b27ebebef..b1d8cc075f387 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/transformer/SearchDocumentTransformer.java @@ -3,15 +3,18 @@ import static com.linkedin.metadata.Constants.*; import static com.linkedin.metadata.models.StructuredPropertyUtils.sanitizeStructuredPropertyFQN; import static com.linkedin.metadata.models.annotation.SearchableAnnotation.OBJECT_FIELD_TYPES; +import static com.linkedin.metadata.search.elasticsearch.indexbuilder.MappingsBuilder.SYSTEM_CREATED_FIELD; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.databind.node.JsonNodeFactory; import com.fasterxml.jackson.databind.node.ObjectNode; +import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.schema.DataSchema; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.Aspect; +import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.AspectRetriever; import com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator; import com.linkedin.metadata.models.AspectSpec; @@ -45,7 +48,6 @@ @Setter @RequiredArgsConstructor public class SearchDocumentTransformer { - // Number of elements to index for a given array. 
// The cap improves search speed when having fields with a large number of elements private final int maxArrayLength; @@ -87,10 +89,25 @@ public Optional transformSnapshot( return Optional.of(searchDocument.toString()); } - public Optional transformAspect( - final Urn urn, - final RecordTemplate aspect, - final AspectSpec aspectSpec, + public static ObjectNode withSystemCreated( + ObjectNode searchDocument, + @Nonnull ChangeType changeType, + @Nonnull EntitySpec entitySpec, + @Nonnull AspectSpec aspectSpec, + @Nonnull final AuditStamp auditStamp) { + + // relies on the MCP processor preventing unneeded key aspects + if (Set.of(ChangeType.CREATE, ChangeType.CREATE_ENTITY, ChangeType.UPSERT).contains(changeType) + && entitySpec.getKeyAspectName().equals(aspectSpec.getName())) { + searchDocument.put(SYSTEM_CREATED_FIELD, auditStamp.getTime()); + } + return searchDocument; + } + + public Optional transformAspect( + final @Nonnull Urn urn, + final @Nonnull RecordTemplate aspect, + final @Nonnull AspectSpec aspectSpec, final Boolean forDelete) throws RemoteInvocationException, URISyntaxException { final Map> extractedSearchableFields = @@ -98,22 +115,23 @@ public Optional transformAspect( final Map> extractedSearchScoreFields = FieldExtractor.extractFields(aspect, aspectSpec.getSearchScoreFieldSpecs(), maxValueLength); - Optional result = Optional.empty(); + Optional result = Optional.empty(); if (!extractedSearchableFields.isEmpty() || !extractedSearchScoreFields.isEmpty()) { final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", urn.toString()); + extractedSearchableFields.forEach( (key, values) -> setSearchableValue(key, values, searchDocument, forDelete)); extractedSearchScoreFields.forEach( (key, values) -> setSearchScoreValue(key, values, searchDocument, forDelete)); - result = Optional.of(searchDocument.toString()); + result = Optional.of(searchDocument); } else if (STRUCTURED_PROPERTIES_ASPECT_NAME.equals(aspectSpec.getName())) { final ObjectNode searchDocument = JsonNodeFactory.instance.objectNode(); searchDocument.put("urn", urn.toString()); setStructuredPropertiesSearchValue( new StructuredProperties(aspect.data()), searchDocument, forDelete); - result = Optional.of(searchDocument.toString()); + result = Optional.of(searchDocument); } return result; diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java index cf3eabb9c5a82..6f5dcee07a5aa 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESAccessControlUtil.java @@ -1,89 +1,30 @@ package com.linkedin.metadata.search.utils; -import static com.linkedin.metadata.authorization.PoliciesConfig.VIEW_ENTITY_PRIVILEGES; -import static com.linkedin.metadata.utils.SearchUtil.ES_INDEX_FIELD; -import static com.linkedin.metadata.utils.SearchUtil.KEYWORD_SUFFIX; +import static com.datahub.authorization.AuthUtil.VIEW_RESTRICTED_ENTITY_TYPES; +import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.google.common.collect.ImmutableList; +import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; import 
com.linkedin.data.template.StringArray; -import com.linkedin.metadata.aspect.hooks.OwnerTypeMap; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.timeseries.elastic.indexbuilder.MappingsBuilder; -import com.linkedin.policy.DataHubActorFilter; -import com.linkedin.policy.DataHubPolicyInfo; -import com.linkedin.policy.PolicyMatchCriterion; -import com.linkedin.policy.PolicyMatchCriterionArray; -import io.datahubproject.metadata.context.ActorContext; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.services.RestrictedService; import java.util.Collection; import java.util.List; import java.util.Objects; -import java.util.Optional; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import org.opensearch.index.query.BoolQueryBuilder; -import org.opensearch.index.query.QueryBuilder; -import org.opensearch.index.query.QueryBuilders; -import org.opensearch.index.query.TermsQueryBuilder; @Slf4j public class ESAccessControlUtil { private ESAccessControlUtil() {} - private static final String OWNER_TYPES_FIELD = "ownerTypes"; - private static final QueryBuilder MATCH_ALL = QueryBuilders.matchAllQuery(); - - /** - * Given the OperationContext produce a filter for search results - * - * @param opContext the OperationContext of the search - * @return - */ - public static Optional buildAccessControlFilters( - @Nonnull OperationContext opContext) { - Optional response = Optional.empty(); - - /* - If search authorization is enabled AND we're also not the system performing the query - */ - if (opContext.getOperationContextConfig().getSearchAuthorizationConfiguration().isEnabled() - && !opContext.isSystemAuth() - && !opContext.getSearchContext().isRestrictedSearch()) { - - BoolQueryBuilder builder = QueryBuilders.boolQuery(); - - // Apply access policies - streamViewQueries(opContext).distinct().forEach(builder::should); - - if (builder.should().isEmpty()) { - // default no filters - return Optional.of(builder.mustNot(MATCH_ALL)); - } else if (!builder.should().contains(MATCH_ALL)) { - // if MATCH_ALL is not present, apply filters requiring at least 1 - builder.minimumShouldMatch(1); - response = Optional.of(builder); - } - } - - // MATCH_ALL filter present or system user or disabled - return response; - } - /** * Given an OperationContext and SearchResult, mark the restricted entities. Currently, the entire - * entity is marked as restricted using the key aspect name + * entity is marked as restricted using the key aspect name. 
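// Editor's note: a small, hypothetical sketch of the restriction flow described above --
// results the actor may not view keep only the key aspect name and get an opaque urn.
// ResultEntity, the Predicate-based view check and the Base64 obfuscation are illustrative
// stand-ins, not the RestrictedService / AuthUtil calls used in this patch.
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.List;
import java.util.function.Predicate;

class RestrictSketch {
  static final class ResultEntity {
    String urn;
    List<String> visibleAspects;

    ResultEntity(String urn, List<String> visibleAspects) {
      this.urn = urn;
      this.visibleAspects = visibleAspects;
    }
  }

  // canView stands in for the authorizer call; keyAspect for the entity spec's key aspect name.
  static void restrict(List<ResultEntity> results, Predicate<String> canView, String keyAspect) {
    for (ResultEntity e : results) {
      if (!canView.test(e.urn)) {
        e.visibleAspects = List.of(keyAspect); // expose only the key aspect
        e.urn = Base64.getEncoder().encodeToString(e.urn.getBytes(StandardCharsets.UTF_8)); // opaque urn
      }
    }
  }

  public static void main(String[] args) {
    ResultEntity open = new ResultEntity("urn:li:dataset:open", List.of("datasetKey", "ownership"));
    ResultEntity hidden = new ResultEntity("urn:li:dataset:hidden", List.of("datasetKey", "ownership"));
    restrict(List.of(open, hidden), urn -> urn.endsWith("open"), "datasetKey");
    System.out.println(hidden.visibleAspects + " " + hidden.urn); // [datasetKey] dXJuOmxpOmRhdGFzZXQ6aGlkZGVu
  }
}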
* * @param searchResult restricted search result */ @@ -92,169 +33,49 @@ public static void restrictSearchResult( restrictSearchResult(opContext, searchResult.getEntities()); } - public static > T restrictSearchResult( - @Nonnull OperationContext opContext, T searchEntities) { - if (opContext.getOperationContextConfig().getSearchAuthorizationConfiguration().isEnabled() - && opContext.getSearchContext().isRestrictedSearch()) { - final EntityRegistry entityRegistry = opContext.getEntityRegistry(); - final String actorUrnStr = - opContext.getSessionActorContext().getAuthentication().getActor().toUrnStr(); - final DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of(new ConjunctivePrivilegeGroup(VIEW_ENTITY_PRIVILEGES))); - - for (SearchEntity searchEntity : searchEntities) { - final String entityType = searchEntity.getEntity().getEntityType(); - final Optional resourceSpec = - Optional.of(new EntitySpec(entityType, searchEntity.getEntity().toString())); - if (!AuthUtil.isAuthorized( - opContext.getAuthorizerContext().getAuthorizer(), actorUrnStr, resourceSpec, orGroup)) { - final String keyAspectName = - entityRegistry.getEntitySpecs().get(entityType.toLowerCase()).getKeyAspectName(); - searchEntity.setRestrictedAspects(new StringArray(List.of(keyAspectName))); + public static Collection restrictSearchResult( + @Nonnull OperationContext opContext, Collection searchEntities) { + if (opContext.getOperationContextConfig().getViewAuthorizationConfiguration().isEnabled() + && !opContext.isSystemAuth()) { + final EntityRegistry entityRegistry = Objects.requireNonNull(opContext.getEntityRegistry()); + final RestrictedService restrictedService = + Objects.requireNonNull(opContext.getServicesRegistryContext()).getRestrictedService(); + final Authentication auth = opContext.getSessionActorContext().getAuthentication(); + final Authorizer authorizer = opContext.getAuthorizerContext().getAuthorizer(); + + if (opContext.getSearchContext().isRestrictedSearch()) { + for (SearchEntity searchEntity : searchEntities) { + final String entityType = searchEntity.getEntity().getEntityType(); + final com.linkedin.metadata.models.EntitySpec entitySpec = + entityRegistry.getEntitySpec(entityType); + + if (VIEW_RESTRICTED_ENTITY_TYPES.contains(entityType) + && !AuthUtil.canViewEntity( + auth.getActor().toUrnStr(), authorizer, searchEntity.getEntity())) { + + // Not authorized && restricted response requested + if (opContext.getSearchContext().isRestrictedSearch()) { + // Restrict entity + searchEntity.setRestrictedAspects( + new StringArray(List.of(entitySpec.getKeyAspectName()))); + + searchEntity.setEntity( + restrictedService.encryptRestrictedUrn(searchEntity.getEntity())); + } + } } } } return searchEntities; } - private static final Function activeMetadataViewEntityPolicyFilter = - policy -> - policy.getPrivileges() != null - && PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState()) - && PoliciesConfig.METADATA_POLICY_TYPE.equals(policy.getType()) - && VIEW_ENTITY_PRIVILEGES.stream() - .anyMatch(priv -> policy.getPrivileges().contains(priv)); - - private static Stream streamViewQueries(OperationContext opContext) { - return opContext.getSessionActorContext().getPolicyInfoSet().stream() - .filter(activeMetadataViewEntityPolicyFilter::apply) - .map( - policy -> { - // Build actor query - QueryBuilder actorQuery = buildActorQuery(opContext, policy); - - if (!policy.hasResources()) { - // no resource restrictions - return actorQuery; - } else { - - // No filters or criteria - 
if (!policy.getResources().hasFilter() - || !policy.getResources().getFilter().hasCriteria()) { - return null; - } - - PolicyMatchCriterionArray criteriaArray = - policy.getResources().getFilter().getCriteria(); - // Cannot apply policy if we can't map every field - if (!criteriaArray.stream().allMatch(criteria -> toESField(criteria).isPresent())) { - return null; - } - - BoolQueryBuilder resourceQuery = QueryBuilders.boolQuery(); - // apply actor filter if present - if (!MATCH_ALL.equals(actorQuery)) { - resourceQuery.filter(actorQuery); - } - // add resource query - buildResourceQuery(opContext, criteriaArray).forEach(resourceQuery::filter); - return resourceQuery; - } - }) - .filter(Objects::nonNull); - } - - /** - * Build an entity index query for ownership policies. If no restrictions, returns MATCH_ALL query - * - * @param opContext context - * @param policy policy - * @return filter query - */ - private static QueryBuilder buildActorQuery( - OperationContext opContext, DataHubPolicyInfo policy) { - DataHubActorFilter actorFilter = policy.getActors(); - - if (!policy.hasActors() - || !(actorFilter.isResourceOwners() || actorFilter.hasResourceOwnersTypes())) { - // no owner restriction - return MATCH_ALL; - } - - ActorContext actorContext = opContext.getSessionActorContext(); - - // policy might apply to the actor via user or group - List actorAndGroupUrns = - Stream.concat( - Stream.of(actorContext.getAuthentication().getActor().toUrnStr()), - actorContext.getGroupMembership().stream().map(Urn::toString)) - .map(String::toLowerCase) - .distinct() - .collect(Collectors.toList()); - - if (!actorFilter.hasResourceOwnersTypes()) { - // owners without owner type restrictions - return QueryBuilders.termsQuery( - ESUtils.toKeywordField(MappingsBuilder.OWNERS_FIELD, false), actorAndGroupUrns); - } else { - // owners with type restrictions - BoolQueryBuilder orQuery = QueryBuilders.boolQuery(); - orQuery.minimumShouldMatch(1); - - Set typeFields = - actorFilter.getResourceOwnersTypes().stream() - .map( - typeUrn -> - String.format( - "%s.%s%s", - OWNER_TYPES_FIELD, - OwnerTypeMap.encodeFieldName(typeUrn.toString()), - KEYWORD_SUFFIX)) - .collect(Collectors.toSet()); - - typeFields.forEach( - field -> orQuery.should(QueryBuilders.termsQuery(field, actorAndGroupUrns))); - - return orQuery; - } - } - - private static Stream buildResourceQuery( - OperationContext opContext, PolicyMatchCriterionArray criteriaArray) { - return criteriaArray.stream() - .map( - criteria -> - QueryBuilders.termsQuery( - toESField(criteria).get(), toESValues(opContext, criteria))); - } - - private static Optional toESField(PolicyMatchCriterion criterion) { - switch (criterion.getField()) { - case "TYPE": - return Optional.of(ES_INDEX_FIELD); - case "URN": - return Optional.of(ESUtils.toKeywordField(MappingsBuilder.URN_FIELD, false)); - case "TAG": - return Optional.of(ESUtils.toKeywordField(MappingsBuilder.TAGS_FIELD, false)); - case "DOMAIN": - return Optional.of(ESUtils.toKeywordField(MappingsBuilder.DOMAINS_FIELD, false)); - default: - return Optional.empty(); - } - } - - private static Collection toESValues( - OperationContext opContext, PolicyMatchCriterion criterion) { - switch (criterion.getField()) { - case "TYPE": - return criterion.getValues().stream() - .map( - value -> - opContext.getSearchContext().getIndexConvention().getEntityIndexName(value)) - .collect(Collectors.toSet()); - default: - return criterion.getValues(); + public static boolean restrictUrn(@Nonnull OperationContext opContext, @Nonnull Urn 
urn) { + if (opContext.getOperationContextConfig().getViewAuthorizationConfiguration().isEnabled() + && !opContext.isSystemAuth()) { + final Authentication auth = opContext.getSessionActorContext().getAuthentication(); + final Authorizer authorizer = opContext.getAuthorizerContext().getAuthorizer(); + return !AuthUtil.canViewEntity(auth.getActor().toUrnStr(), authorizer, urn); } + return false; } } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java index 3263773ca064e..6f905b8d31f3f 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/search/utils/ESUtils.java @@ -683,8 +683,6 @@ public static BoolQueryBuilder applyDefaultSearchFilters( @Nonnull BoolQueryBuilder filterQuery) { // filter soft deleted entities by default filterSoftDeletedByDefault(filter, filterQuery, opContext.getSearchContext().getSearchFlags()); - // filter based on access controls - ESAccessControlUtil.buildAccessControlFilters(opContext).ifPresent(filterQuery::filter); return filterQuery; } diff --git a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java index 52f0d680ff4ba..a7020702ff87e 100644 --- a/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java +++ b/metadata-io/src/main/java/com/linkedin/metadata/service/UpdateIndicesService.java @@ -1,6 +1,7 @@ package com.linkedin.metadata.service; import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.search.transformer.SearchDocumentTransformer.withSystemCreated; import static com.linkedin.metadata.search.utils.QueryUtils.*; import com.fasterxml.jackson.core.JsonProcessingException; @@ -59,6 +60,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; @@ -95,7 +97,12 @@ public class UpdateIndicesService implements SearchIndicesService { private boolean _structuredPropertiesWriteEnabled; private static final Set UPDATE_CHANGE_TYPES = - ImmutableSet.of(ChangeType.UPSERT, ChangeType.RESTATE, ChangeType.PATCH); + ImmutableSet.of( + ChangeType.CREATE, + ChangeType.CREATE_ENTITY, + ChangeType.UPSERT, + ChangeType.RESTATE, + ChangeType.PATCH); @VisibleForTesting public void setGraphDiffMode(boolean graphDiffMode) { @@ -182,8 +189,7 @@ private void handleUpdateChangeEvent(@Nonnull final MCLItem event) throws IOExce updateIndexMappings(entitySpec, aspectSpec, aspect, previousAspect); // Step 2. For all aspects, attempt to update Search - updateSearchService( - entitySpec.getName(), urn, aspectSpec, aspect, event.getSystemMetadata(), previousAspect); + updateSearchService(event); // Step 3. 
For all aspects, attempt to update Graph SystemMetadata systemMetadata = event.getSystemMetadata(); @@ -510,17 +516,29 @@ private static List getMergedEdges(final Set oldEdgeSet, final Set searchDocument; Optional previousSearchDocument = Optional.empty(); try { - searchDocument = _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, false); + searchDocument = + _searchDocumentTransformer + .transformAspect(urn, aspect, aspectSpec, false) + .map( + objectNode -> + withSystemCreated( + objectNode, + event.getChangeType(), + event.getEntitySpec(), + aspectSpec, + event.getAuditStamp())) + .map(Objects::toString); } catch (Exception e) { log.error( "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); @@ -545,7 +563,9 @@ private void updateSearchService( if (previousAspect != null) { try { previousSearchDocument = - _searchDocumentTransformer.transformAspect(urn, previousAspect, aspectSpec, false); + _searchDocumentTransformer + .transformAspect(urn, previousAspect, aspectSpec, false) + .map(Objects::toString); } catch (Exception e) { log.error( "Error in getting documents from previous aspect state: {} for aspect {}, continuing without diffing.", @@ -665,7 +685,9 @@ private void deleteSearchData( Optional searchDocument; try { searchDocument = - _searchDocumentTransformer.transformAspect(urn, aspect, aspectSpec, true); // TODO + _searchDocumentTransformer + .transformAspect(urn, aspect, aspectSpec, true) + .map(Objects::toString); // TODO } catch (Exception e) { log.error( "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java b/metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java index c38e14711fe96..aea3b5d00543c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/aspect/utils/DefaultAspectsUtilTest.java @@ -67,7 +67,8 @@ public void testAdditionalChanges() { DefaultAspectsUtil.getAdditionalChanges( AspectsBatchImpl.builder() .mcps(List.of(proposal1), new AuditStamp(), entityServiceImpl) - .build(), + .build() + .getMCPItems(), entityServiceImpl, false) .stream() @@ -75,6 +76,10 @@ public void testAdditionalChanges() { .collect(Collectors.toList()); // proposals for key aspect, browsePath, browsePathV2, dataPlatformInstance Assert.assertEquals(proposalList.size(), 4); - Assert.assertEquals(proposalList.get(0).getChangeType(), ChangeType.UPSERT); + Assert.assertEquals( + proposalList.stream() + .map(MetadataChangeProposal::getChangeType) + .collect(Collectors.toList()), + List.of(ChangeType.CREATE, ChangeType.CREATE, ChangeType.CREATE, ChangeType.CREATE)); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java index e325e23ef8607..25f9e4b28a32a 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/entity/EntityServiceTest.java @@ -11,6 +11,10 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.OwnershipType; import com.linkedin.common.Status; 
import com.linkedin.common.UrnArray; import com.linkedin.common.VersionedUrn; @@ -41,6 +45,7 @@ import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.metadata.entity.validation.ValidationException; import com.linkedin.metadata.entity.validation.ValidationUtils; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.key.CorpUserKey; @@ -1903,6 +1908,51 @@ public void testStructuredPropertyIngestProposal() throws Exception { _entityServiceImpl.getAspect(entityUrn, "structuredProperties", 0), expectedProperties); } + @Test + public void testCreateChangeTypeProposal() { + Urn user1 = UrnUtils.getUrn("urn:li:corpuser:test1"); + Urn user2 = UrnUtils.getUrn("urn:li:corpuser:test2"); + Urn entityUrn = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:looker,sample_dataset,PROD)"); + + MetadataChangeProposal initialCreateProposal = new MetadataChangeProposal(); + initialCreateProposal.setEntityUrn(entityUrn); + initialCreateProposal.setChangeType(ChangeType.CREATE); + initialCreateProposal.setEntityType(entityUrn.getEntityType()); + initialCreateProposal.setAspectName(OWNERSHIP_ASPECT_NAME); + initialCreateProposal.setAspect( + GenericRecordUtils.serializeAspect( + new Ownership() + .setOwners( + new OwnerArray( + new Owner() + .setOwner(user1) + .setType(OwnershipType.CUSTOM) + .setTypeUrn(DEFAULT_OWNERSHIP_TYPE_URN))))); + + MetadataChangeProposal secondCreateProposal = new MetadataChangeProposal(); + secondCreateProposal.setEntityUrn(entityUrn); + secondCreateProposal.setChangeType(ChangeType.CREATE); + secondCreateProposal.setEntityType(entityUrn.getEntityType()); + secondCreateProposal.setAspectName(OWNERSHIP_ASPECT_NAME); + secondCreateProposal.setAspect( + GenericRecordUtils.serializeAspect( + new Ownership() + .setOwners( + new OwnerArray( + new Owner() + .setOwner(user2) + .setType(OwnershipType.CUSTOM) + .setTypeUrn(DEFAULT_OWNERSHIP_TYPE_URN))))); + + _entityServiceImpl.ingestProposal(initialCreateProposal, TEST_AUDIT_STAMP, false); + + // create when entity exists should be denied + assertThrows( + ValidationException.class, + () -> _entityServiceImpl.ingestProposal(secondCreateProposal, TEST_AUDIT_STAMP, false)); + } + @Nonnull protected com.linkedin.entity.Entity createCorpUserEntity(Urn entityUrn, String email) throws Exception { diff --git a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java index a2cb9b7412a8e..a28337e30602f 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/graph/sibling/SiblingGraphServiceTest.java @@ -18,6 +18,9 @@ import com.linkedin.metadata.graph.LineageRelationship; import com.linkedin.metadata.graph.LineageRelationshipArray; import com.linkedin.metadata.graph.SiblingGraphService; +import com.linkedin.metadata.models.registry.ConfigEntityRegistry; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.snapshot.Snapshot; import java.net.URISyntaxException; import java.util.Collection; import java.util.HashMap; @@ -68,6 +71,10 @@ public void setup() { _mockEntityService = Mockito.mock(EntityService.class); when(_mockEntityService.exists(any(Collection.class), any(Boolean.class))) 
.thenAnswer(args -> new HashSet<>(args.getArgument(0))); + EntityRegistry entityRegistry = + new ConfigEntityRegistry( + Snapshot.class.getClassLoader().getResourceAsStream("entity-registry.yml")); + when(_mockEntityService.getEntityRegistry()).thenReturn(entityRegistry); _graphService = Mockito.mock(GraphService.class); _client = new SiblingGraphService(_mockEntityService, _graphService); } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java index f117c42572bd5..976e8a62dff52 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/RecommendationsServiceTest.java @@ -1,13 +1,11 @@ package com.linkedin.metadata.recommendation; -import static org.mockito.Mockito.mock; import static org.testng.Assert.assertEquals; import static org.testng.Assert.assertTrue; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.TestEntityUtil; -import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.candidatesource.TestSource; import com.linkedin.metadata.recommendation.ranker.RecommendationModuleRanker; import com.linkedin.metadata.recommendation.ranker.SimpleRecommendationRanker; @@ -83,7 +81,7 @@ public void testService() throws URISyntaxException { List result = service.listRecommendations( TestOperationContexts.userContextNoSearchAuthorization( - mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), + Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); assertTrue(result.isEmpty()); @@ -95,7 +93,7 @@ public void testService() throws URISyntaxException { result = service.listRecommendations( TestOperationContexts.userContextNoSearchAuthorization( - mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), + Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); assertEquals(result.size(), 1); @@ -112,7 +110,7 @@ public void testService() throws URISyntaxException { result = service.listRecommendations( TestOperationContexts.userContextNoSearchAuthorization( - mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), + Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 10); assertEquals(result.size(), 4); @@ -141,7 +139,7 @@ public void testService() throws URISyntaxException { result = service.listRecommendations( TestOperationContexts.userContextNoSearchAuthorization( - mock(EntityRegistry.class), Urn.createFromString("urn:li:corpuser:me")), + Urn.createFromString("urn:li:corpuser:me")), new RecommendationRequestContext().setScenario(ScenarioType.HOME), 2); assertEquals(result.size(), 2); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java index eb616ee15a292..5c79c000fb256 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java +++ 
b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/EntitySearchAggregationCandidateSourceTest.java @@ -33,10 +33,10 @@ import org.testng.annotations.Test; public class EntitySearchAggregationCandidateSourceTest { - private EntitySearchService _entitySearchService = mock(EntitySearchService.class); - private EntityRegistry entityRegistry = mock(EntityRegistry.class); - private EntitySearchAggregationSource _valueBasedCandidateSource; - private EntitySearchAggregationSource _urnBasedCandidateSource; + private final EntitySearchService entitySearchService = mock(EntitySearchService.class); + private final EntityRegistry entityRegistry = mock(EntityRegistry.class); + private EntitySearchAggregationSource valueBasedCandidateSource; + private EntitySearchAggregationSource urnBasedCandidateSource; private OperationContext opContext; private static final Urn USER = new CorpuserUrn("test"); @@ -45,15 +45,15 @@ public class EntitySearchAggregationCandidateSourceTest { @BeforeMethod public void setup() { - opContext = TestOperationContexts.userContextNoSearchAuthorization(entityRegistry, USER); - Mockito.reset(_entitySearchService); - _valueBasedCandidateSource = buildCandidateSource("testValue", false); - _urnBasedCandidateSource = buildCandidateSource("testUrn", true); + opContext = TestOperationContexts.userContextNoSearchAuthorization(USER); + Mockito.reset(entitySearchService); + valueBasedCandidateSource = buildCandidateSource("testValue", false); + urnBasedCandidateSource = buildCandidateSource("testUrn", true); } private EntitySearchAggregationSource buildCandidateSource( String identifier, boolean isValueUrn) { - return new EntitySearchAggregationSource(_entitySearchService, entityRegistry) { + return new EntitySearchAggregationSource(entitySearchService, entityRegistry) { @Override protected String getSearchFieldName() { return identifier; @@ -96,24 +96,24 @@ public boolean isEligible( @Test public void testWhenSearchServiceReturnsEmpty() { Mockito.when( - _entitySearchService.aggregateByValue( + entitySearchService.aggregateByValue( any(OperationContext.class), eq(null), eq("testValue"), eq(null), anyInt())) .thenReturn(Collections.emptyMap()); List candidates = - _valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); + valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertTrue(candidates.isEmpty()); - assertFalse(_valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); + assertFalse(valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); } @Test public void testWhenSearchServiceReturnsValueResults() { // One result Mockito.when( - _entitySearchService.aggregateByValue( + entitySearchService.aggregateByValue( any(OperationContext.class), any(), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L)); List candidates = - _valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); + valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), "value1"); @@ -128,14 +128,14 @@ public void testWhenSearchServiceReturnsValueResults() { new Criterion().setField("testValue").setValue("value1")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); - assertTrue(_valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); + 
assertTrue(valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); // Multiple result Mockito.when( - _entitySearchService.aggregateByValue( + entitySearchService.aggregateByValue( any(OperationContext.class), any(), eq("testValue"), eq(null), anyInt())) .thenReturn(ImmutableMap.of("value1", 1L, "value2", 2L, "value3", 3L)); - candidates = _valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); + candidates = valueBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); assertEquals(content.getValue(), "value3"); @@ -163,7 +163,7 @@ public void testWhenSearchServiceReturnsValueResults() { new Criterion().setField("testValue").setValue("value2")); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); - assertTrue(_valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); + assertTrue(valueBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); } @Test @@ -173,11 +173,11 @@ public void testWhenSearchServiceReturnsUrnResults() { Urn testUrn2 = new TestEntityUrn("testUrn2", "testUrn2", "testUrn2"); Urn testUrn3 = new TestEntityUrn("testUrn3", "testUrn3", "testUrn3"); Mockito.when( - _entitySearchService.aggregateByValue( + entitySearchService.aggregateByValue( any(OperationContext.class), any(), eq("testUrn"), eq(null), anyInt())) .thenReturn(ImmutableMap.of(testUrn1.toString(), 1L)); List candidates = - _urnBasedCandidateSource.getRecommendations(opContext, CONTEXT); + urnBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 1); RecommendationContent content = candidates.get(0); assertEquals(content.getValue(), testUrn1.toString()); @@ -192,16 +192,16 @@ public void testWhenSearchServiceReturnsUrnResults() { new Criterion().setField("testUrn").setValue(testUrn1.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 1L); - assertTrue(_urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); + assertTrue(urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); // Multiple result Mockito.when( - _entitySearchService.aggregateByValue( + entitySearchService.aggregateByValue( any(OperationContext.class), any(), eq("testUrn"), eq(null), anyInt())) .thenReturn( ImmutableMap.of( testUrn1.toString(), 1L, testUrn2.toString(), 2L, testUrn3.toString(), 3L)); - candidates = _urnBasedCandidateSource.getRecommendations(opContext, CONTEXT); + candidates = urnBasedCandidateSource.getRecommendations(opContext, CONTEXT); assertEquals(candidates.size(), 2); content = candidates.get(0); assertEquals(content.getValue(), testUrn3.toString()); @@ -229,6 +229,6 @@ public void testWhenSearchServiceReturnsUrnResults() { new Criterion().setField("testUrn").setValue(testUrn2.toString())); assertNotNull(params.getContentParams()); assertEquals(params.getContentParams().getCount().longValue(), 2L); - assertTrue(_urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); + assertTrue(urnBasedCandidateSource.getRecommendationModule(opContext, CONTEXT).isPresent()); } } diff --git a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java index 51b4ecf1410a1..b8bd4f7bc9919 100644 --- 
a/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/recommendation/candidatesource/RecommendationUtilsTest.java @@ -1,12 +1,9 @@ package com.linkedin.metadata.recommendation.candidatesource; -import static org.mockito.Mockito.mock; - import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.models.registry.EntityRegistry; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.test.metadata.context.TestOperationContexts; import java.util.Collections; @@ -18,8 +15,7 @@ public class RecommendationUtilsTest { @Test private void testIsSupportedEntityType() { Urn testUrn = UrnUtils.getUrn("urn:li:corpuser:john"); - OperationContext opContext = - TestOperationContexts.userContextNoSearchAuthorization(mock(EntityRegistry.class), testUrn); + OperationContext opContext = TestOperationContexts.userContextNoSearchAuthorization(testUrn); Assert.assertTrue( RecommendationUtils.isSupportedEntityType( diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java index 52f91fb1b8c28..22d46c878b256 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/LineageServiceTestBase.java @@ -65,6 +65,7 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.r2.RemoteInvocationException; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.RequestContext; import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.ArrayList; @@ -141,7 +142,7 @@ public void setup() throws RemoteInvocationException, URISyntaxException { operationContext = TestOperationContexts.systemContextNoSearchAuthorization( aspectRetriever.getEntityRegistry(), indexConvention) - .asSession(Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); + .asSession(RequestContext.TEST, Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); settingsBuilder = new SettingsBuilder(null); elasticSearchService = buildEntitySearchService(); elasticSearchService.configure(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java index 3b233ed8ad710..23c961fd182fd 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/SearchServiceTestBase.java @@ -41,6 +41,7 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.r2.RemoteInvocationException; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.RequestContext; import io.datahubproject.test.metadata.context.TestOperationContexts; import java.net.URISyntaxException; import java.util.Map; @@ -91,7 +92,7 @@ public void setup() throws RemoteInvocationException, URISyntaxException { operationContext = TestOperationContexts.systemContextNoSearchAuthorization( aspectRetriever.getEntityRegistry(), indexConvention) - .asSession(Authorizer.EMPTY, 
TestOperationContexts.TEST_USER_AUTH); + .asSession(RequestContext.TEST, Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); settingsBuilder = new SettingsBuilder(null); elasticSearchService = buildEntitySearchService(); elasticSearchService.configure(); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java index d384d8275bdd7..d7cb401814fb4 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/GoldenTestBase.java @@ -54,7 +54,7 @@ public abstract class GoldenTestBase extends AbstractTestNGSpringContextTests { @Nonnull protected OperationContext getOperationContext() { return TestOperationContexts.userContextNoSearchAuthorization( - getEntityRegistry(), Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); + Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH, getEntityRegistry()); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java index 6950f62d45263..72ad52f27bd2c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/LineageDataFixtureTestBase.java @@ -35,7 +35,7 @@ public abstract class LineageDataFixtureTestBase extends AbstractTestNGSpringCon @Nonnull protected OperationContext getOperationContext() { return TestOperationContexts.userContextNoSearchAuthorization( - getEntityRegistry(), Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH); + Authorizer.EMPTY, TestOperationContexts.TEST_USER_AUTH, getEntityRegistry()); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java index deda14c2216b1..e155fb5135adb 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/fixtures/SampleDataFixtureTestBase.java @@ -91,7 +91,7 @@ public abstract class SampleDataFixtureTestBase extends AbstractTestNGSpringCont @Nonnull protected OperationContext getOperationContext() { return TestOperationContexts.userContextNoSearchAuthorization( - getEntityRegistry(), Authorizer.EMPTY, AUTHENTICATION); + Authorizer.EMPTY, AUTHENTICATION, getEntityRegistry()); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java index 8d504c562c99c..1b4b7de5bf817 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/indexbuilder/MappingsBuilderTest.java @@ -21,7 +21,7 @@ public void testMappingsBuilder() { Map result = MappingsBuilder.getMappings(TestEntitySpecBuilder.getSpec()); assertEquals(result.size(), 1); Map properties = (Map) result.get("properties"); - assertEquals(properties.size(), 21); + assertEquals(properties.size(), 22); assertEquals( properties.get("urn"), ImmutableMap.of( @@ -50,6 +50,7 @@ public void testMappingsBuilder() { "analyzer", 
"partial_urn_component")))); assertEquals(properties.get("runId"), ImmutableMap.of("type", "keyword")); + assertEquals(properties.get("systemCreated"), ImmutableMap.of("type", "date")); assertTrue(properties.containsKey("browsePaths")); assertTrue(properties.containsKey("browsePathV2")); assertTrue(properties.containsKey("removed")); diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java index 264aa280cac90..d5f292defa1e3 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/SearchDAOTestBase.java @@ -72,9 +72,9 @@ public void setup() throws RemoteInvocationException, URISyntaxException { when(aspectRetriever.getLatestAspectObjects(any(), any())).thenReturn(Map.of()); operationContext = TestOperationContexts.userContextNoSearchAuthorization( - aspectRetriever.getEntityRegistry(), Authorizer.EMPTY, - TestOperationContexts.TEST_USER_AUTH); + TestOperationContexts.TEST_USER_AUTH, + aspectRetriever.getEntityRegistry()); } @Test diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java index be128d7855f39..67723a63c6c3e 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/query/request/SearchRequestHandlerTest.java @@ -135,7 +135,7 @@ public void testSearchRequestHandlerHighlightingTurnedOff() { MISSING_SPECIAL_TYPE + AGGREGATION_SPECIAL_TYPE_DELIMITER + "textFieldOverride")) { - fail("Found unexepected aggregation: " + aggBuilder.getName()); + fail("Found unexpected aggregation: " + aggBuilder.getName()); } } // Highlights should not be present @@ -149,7 +149,9 @@ public void testSearchRequestHandler() { TestEntitySpecBuilder.getSpec(), testQueryConfig, null, aspectRetriever); SearchRequest searchRequest = requestHandler.getSearchRequest( - operationContext.withSearchFlags(flags -> flags.setFulltext(false)), + operationContext.withSearchFlags( + flags -> + flags.setFulltext(false).setSkipHighlighting(false).setSkipAggregates(false)), "testQuery", null, null, @@ -175,7 +177,7 @@ public void testSearchRequestHandler() { MISSING_SPECIAL_TYPE + AGGREGATION_SPECIAL_TYPE_DELIMITER + "textFieldOverride")) { - fail("Found unexepected aggregation: " + aggBuilder.getName()); + fail("Found unexpected aggregation: " + aggBuilder.getName()); } } // Highlights diff --git a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java index 4d97bab1e4214..f793064c7383c 100644 --- a/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java +++ b/metadata-io/src/test/java/com/linkedin/metadata/search/utils/ESAccessControlUtilTest.java @@ -1,6 +1,12 @@ package com.linkedin.metadata.search.utils; -import static org.mockito.ArgumentMatchers.eq; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import static 
org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anySet; +import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import static org.testng.Assert.assertEquals; @@ -8,29 +14,52 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; -import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.authorization.AuthorizerContext; +import com.datahub.authorization.DataHubAuthorizer; +import com.datahub.authorization.DefaultEntitySpecResolver; +import com.datahub.authorization.config.ViewAuthorizationConfiguration; import com.datahub.plugins.auth.authorization.Authorizer; +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.OwnershipType; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; -import com.linkedin.metadata.aspect.hooks.OwnerTypeMap; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.identity.GroupMembership; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.TestEntityRegistry; +import com.linkedin.metadata.search.MatchedFieldArray; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.SearchResultMetadata; import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import com.linkedin.policy.DataHubActorFilter; import com.linkedin.policy.DataHubPolicyInfo; import com.linkedin.policy.DataHubResourceFilter; -import com.linkedin.policy.PolicyMatchCondition; -import com.linkedin.policy.PolicyMatchCriterion; import com.linkedin.policy.PolicyMatchCriterionArray; import com.linkedin.policy.PolicyMatchFilter; +import com.linkedin.r2.RemoteInvocationException; +import com.linkedin.util.Pair; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.RequestContext; +import io.datahubproject.metadata.context.ServicesRegistryContext; +import io.datahubproject.metadata.services.RestrictedService; +import java.net.URISyntaxException; import java.util.List; -import java.util.Optional; +import java.util.Map; import java.util.Set; -import org.opensearch.index.query.QueryBuilders; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import org.testcontainers.shaded.com.google.common.collect.ImmutableMap; import org.testng.annotations.Test; public class ESAccessControlUtilTest { @@ -39,608 +68,562 @@ public class ESAccessControlUtilTest { private static final Urn TEST_GROUP_A = UrnUtils.getUrn("urn:li:corpGroup:a"); private static final Urn TEST_GROUP_B = UrnUtils.getUrn("urn:li:corpGroup:b"); private static final Urn TEST_GROUP_C = UrnUtils.getUrn("urn:li:corpGroup:c"); - private static final Urn TEST_USER_A = UrnUtils.getUrn("urn:li:corpUser:a"); - private static final Urn TEST_USER_B = UrnUtils.getUrn("urn:li:corpUser:b"); + + // User A belongs to Groups A and C + private static 
final Urn TEST_USER_A = UrnUtils.getUrn("urn:li:corpuser:a"); + private static final Urn TEST_USER_B = UrnUtils.getUrn("urn:li:corpuser:b"); private static final Urn TECH_OWNER = UrnUtils.getUrn("urn:li:ownershipType:__system__technical_owner"); private static final Urn BUS_OWNER = UrnUtils.getUrn("urn:li:ownershipType:__system__business_owner"); - private static final Authentication USER_AUTH = + private static final Authentication USER_A_AUTH = new Authentication(new Actor(ActorType.USER, TEST_USER_A.getId()), ""); + private static final Authentication USER_B_AUTH = + new Authentication(new Actor(ActorType.USER, TEST_USER_B.getId()), ""); private static final OperationContext ENABLED_CONTEXT = OperationContext.asSystem( OperationContextConfig.builder() .allowSystemAuthentication(true) - .searchAuthorizationConfiguration( - SearchAuthorizationConfiguration.builder().enabled(true).build()) + .viewAuthorizationConfiguration( + ViewAuthorizationConfiguration.builder().enabled(true).build()) .build(), - new TestEntityRegistry(), SYSTEM_AUTH, + new TestEntityRegistry(), + ServicesRegistryContext.builder().restrictedService(mockRestrictedService()).build(), IndexConventionImpl.NO_PREFIX); + private static final String VIEW_PRIVILEGE = "VIEW_ENTITY_PAGE"; + + private static final Urn UNRESTRICTED_RESULT_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"); + private static final Urn RESTRICTED_RESULT_URN = + UrnUtils.getUrn("urn:li:restricted:(urn:li:dataPlatform:hive,SampleHiveDataset,PROD)"); + + /** Comprehensive list of policy variations */ + private static final Map TEST_POLICIES = + ImmutableMap.builder() + .put( + "allUsers", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setAllUsers(true) + .setGroups(new UrnArray()) + .setUsers(new UrnArray())) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new PolicyMatchCriterionArray())))) + .put( + "userA", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setGroups(new UrnArray()) + .setUsers(new UrnArray(TEST_USER_A))) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new PolicyMatchCriterionArray())))) + .put( + "allGroups", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setAllGroups(true) + .setGroups(new UrnArray()) + .setUsers(new UrnArray())) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new PolicyMatchCriterionArray())))) + .put( + "groupB", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setGroups(new UrnArray(TEST_GROUP_B)) + .setUsers(new UrnArray())) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new 
PolicyMatchCriterionArray())))) + .put( + "groupC", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setGroups(new UrnArray(TEST_GROUP_C)) + .setUsers(new UrnArray())) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new PolicyMatchCriterionArray())))) + .put( + "anyOwner", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setResourceOwners(true) + .setGroups(new UrnArray()) + .setUsers(new UrnArray())) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new PolicyMatchCriterionArray())))) + .put( + "businessOwner", + new DataHubPolicyInfo() + .setDisplayName("") + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setActors( + new DataHubActorFilter() + .setResourceOwners(true) + .setResourceOwnersTypes(new UrnArray(BUS_OWNER)) + .setGroups(new UrnArray()) + .setUsers(new UrnArray())) + .setPrivileges(new StringArray(List.of(VIEW_PRIVILEGE))) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria(new PolicyMatchCriterionArray())))) + .build(); + + /** User A is a technical owner of the result User B has no ownership */ + private static final Map>> TEST_OWNERSHIP = + ImmutableMap.>>builder() + .put(UNRESTRICTED_RESULT_URN, Map.of(TEST_USER_A, Set.of(TECH_OWNER))) + .build(); + @Test - public void testAllUserAllGroup() { - OperationContext allUsers = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllUsers(true)) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - OperationContext allGroups = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllGroups(true)) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + public void testAllUsersRestrictions() throws RemoteInvocationException, URISyntaxException { - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(allUsers), - Optional.empty(), - "Expected no ES filters for all user access without resource restrictions"); - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(allGroups), - Optional.empty(), - "Expected no ES filters for all user access without resource restrictions"); + // USER A + OperationContext userAContext = + sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("allUsers"))); + + SearchResult result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(false)), result); + assertEquals(result.getEntities().get(0).getEntity(), 
UNRESTRICTED_RESULT_URN); + + // USER B + OperationContext userBContext = sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("allUsers"))); + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(false)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); } @Test - public void testAllUserAllGroupEntityType() { - OperationContext resourceAllUsersPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllUsers(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("TYPE") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues(new StringArray("dataset", "chart")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - OperationContext resourceAllGroupsPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllGroups(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("TYPE") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues(new StringArray("dataset", "chart")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + public void testSingleUserRestrictions() throws RemoteInvocationException, URISyntaxException { - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "_index", List.of("datasetindex_v2", "chartindex_v2")))) - .minimumShouldMatch(1)), - "Expected index filter for each entity"); + // USER A + OperationContext userAContext = sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("userA"))); + SearchResult result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(false)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + // USER B + OperationContext userBContext = sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("userA"))); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals(
.minimumShouldMatch(1)), - "Expected index filter for each entity"); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected User B (not User A) to receive a restricted urn"); } @Test - public void testAllUserAllGroupUrn() { - OperationContext resourceAllUsersPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllUsers(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("URN") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues( - new StringArray( - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - OperationContext resourceAllGroupsPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllGroups(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("URN") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues( - new StringArray( - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + public void testAllGroupsRestrictions() throws RemoteInvocationException, URISyntaxException { - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "urn", - List.of( - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))) - .minimumShouldMatch(1)), - "Expected filter for each urn"); + // USER A + OperationContext userAContext = + sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("allGroups"))); + + SearchResult result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(false)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + // USER B (No Groups!) 
+ OperationContext userBContext = sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("allGroups"))); + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "urn", - List.of( - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.analytics.ShelterDogs,PROD)", - "urn:li:dataset:(urn:li:dataPlatform:snowflake,long_tail_companions.ecommerce.account,PROD)")))) - .minimumShouldMatch(1)), - "Expected filter for each urn"); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected User B (no groups) to receive a restricted urn"); } @Test - public void testAllUserAllGroupTag() { - OperationContext resourceAllUsersPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllUsers(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("TAG") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues( - new StringArray( - "urn:li:tag:pii", "urn:li:tag:prod")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - OperationContext resourceAllGroupsPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllGroups(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("TAG") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues( - new StringArray( - "urn:li:tag:pii", "urn:li:tag:prod")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + public void testSingleGroupRestrictions() throws RemoteInvocationException, URISyntaxException { - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "tags.keyword", List.of("urn:li:tag:pii", "urn:li:tag:prod")))) - .minimumShouldMatch(1)), - "Expected filter each tag"); + // GROUP B Policy + // USER A + final OperationContext userAContext = + sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("groupB"))); + SearchResult result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "tags.keyword", List.of("urn:li:tag:pii", "urn:li:tag:prod")))) - .minimumShouldMatch(1)), - "Expected filter each tag"); - } + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected restricted urn because not a member of Group B"); - @Test - public void testAllUserAllGroupDomain() { - OperationContext resourceAllUsersPolicy = - 
sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllUsers(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("DOMAIN") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues( - new StringArray( - "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", - "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - OperationContext resourceAllGroupsPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllGroups(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("DOMAIN") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues( - new StringArray( - "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", - "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + // USER B (No Groups!) + final OperationContext userBContext = + sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("groupB"))); + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "domains.keyword", - List.of( - "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", - "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))) - .minimumShouldMatch(1)), - "Expected filter each domain"); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected User B (no groups) to receive a restricted urn"); + + // Group C Policy + // USER A + final OperationContext userAGroupCContext = + sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("groupC"))); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAGroupCContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAGroupCContext.withSearchFlags(flags -> flags.setIncludeRestricted(false)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + // USER B (No Groups!) 
+ final OperationContext userBgroupCContext = + sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("groupC"))); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBgroupCContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .filter( - QueryBuilders.termsQuery( - "domains.keyword", - List.of( - "urn:li:domain:f9229a0b-c5ad-47e7-9ff3-f4248c5cb634", - "urn:li:domain:7d64d0fa-66c3-445c-83db-3a324723daf8")))) - .minimumShouldMatch(1)), - "Expected filter each domain"); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected User B (no groups) to receive a restricted urn"); } @Test - public void testAllUserAllGroupUnknownField() { - OperationContext resourceAllUsersPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllUsers(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("UNKNOWN FIELD") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues(new StringArray("dataset", "chart")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - OperationContext resourceAllGroupsPolicy = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setAllGroups(true)) - .setResources( - new DataHubResourceFilter() - .setFilter( - new PolicyMatchFilter() - .setCriteria( - new PolicyMatchCriterionArray( - new PolicyMatchCriterion() - .setField("UNKNOWN FIELD") - .setCondition(PolicyMatchCondition.EQUALS) - .setValues(new StringArray("dataset", "chart")))))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + public void testAnyOwnerRestrictions() throws RemoteInvocationException, URISyntaxException { - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllUsersPolicy), - Optional.of(QueryBuilders.boolQuery().mustNot(QueryBuilders.matchAllQuery())), - "Expected match-none query when an unknown field is encountered"); + // USER A + OperationContext userAContext = + sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("anyOwner"))); + + SearchResult result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(false)), result); + assertEquals(result.getEntities().get(0).getEntity(), UNRESTRICTED_RESULT_URN); + // USER B (not an owner) + OperationContext userBContext = sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("anyOwner"))); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(resourceAllGroupsPolicy), - 
Optional.of(QueryBuilders.boolQuery().mustNot(QueryBuilders.matchAllQuery())), - "Expected match-none query when an unknown field is encountered"); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected User B to receive a restricted urn because User B doesn't own anything"); } @Test - public void testUserGroupOwner() { - OperationContext ownerNoGroupsNoType = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors(new DataHubActorFilter().setResourceOwners(true)) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + public void testBusinessOwnerRestrictions() throws RemoteInvocationException, URISyntaxException { + + // USER A + final OperationContext userAContext = + sessionWithUserAGroupAandC(Set.of(TEST_POLICIES.get("businessOwner"))); + + SearchResult result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userAContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(ownerNoGroupsNoType), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.termsQuery( - "owners.keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .minimumShouldMatch(1)), - "Expected user filter for owners without group filter"); - - OperationContext ownerWithGroupsNoType = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors( - new DataHubActorFilter() - .setResourceOwners(true) - .setGroups(new UrnArray(TEST_GROUP_A))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected restricted urn because not a Business Owner"); + + // USER B + final OperationContext userBContext = + sessionWithUserBNoGroup(Set.of(TEST_POLICIES.get("businessOwner"))); + + result = mockSearchResult(); + ESAccessControlUtil.restrictSearchResult( + userBContext.withSearchFlags(flags -> flags.setIncludeRestricted(true)), result); assertEquals( - ESAccessControlUtil.buildAccessControlFilters(ownerWithGroupsNoType), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.termsQuery( - "owners.keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .minimumShouldMatch(1)), - "Expected user AND group filter for owners"); + result.getEntities().get(0).getEntity(), + RESTRICTED_RESULT_URN, + "Expected User B to receive a restricted urn because not a Business Owner"); } - @Test - public void testUserGroupOwnerTypes() { - OperationContext ownerTypeBusinessNoUserNoGroup = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors( - new DataHubActorFilter().setResourceOwnersTypes(new UrnArray(BUS_OWNER))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(ownerTypeBusinessNoUserNoGroup), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .should( - QueryBuilders.termsQuery( - "ownerTypes." 
- + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) - + ".keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .minimumShouldMatch(1)) - .minimumShouldMatch(1)), - "Expected user filter for business owner via user or group urn"); - - OperationContext ownerTypeBusinessMultiUserNoGroup = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors( - new DataHubActorFilter() - .setResourceOwnersTypes(new UrnArray(BUS_OWNER)) - .setUsers(new UrnArray(List.of(TEST_USER_A, TEST_USER_B)))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(ownerTypeBusinessMultiUserNoGroup), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .should( - QueryBuilders.termsQuery( - "ownerTypes." - + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) - + ".keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .minimumShouldMatch(1)) - .minimumShouldMatch(1)), - "Expected user filter for `business owner` by owner user/group A urn (excluding other user/group B)"); - - OperationContext ownerWithGroupsBusTechMultiGroup = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors( - new DataHubActorFilter() - .setResourceOwnersTypes(new UrnArray(BUS_OWNER, TECH_OWNER)) - .setGroups(new UrnArray(TEST_GROUP_A, TEST_GROUP_B))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(ownerWithGroupsBusTechMultiGroup), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .should( - QueryBuilders.termsQuery( - "ownerTypes." - + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) - + ".keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .should( - QueryBuilders.termsQuery( - "ownerTypes." - + OwnerTypeMap.encodeFieldName(TECH_OWNER.toString()) - + ".keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .minimumShouldMatch(1)) - .minimumShouldMatch(1)), - "Expected filter for business owner or technical owner by group A (excluding other group B and owner privilege)"); - - OperationContext ownerWithMultiUserMultiGroupsBusTech = - sessionWithPolicy( - Set.of( - new DataHubPolicyInfo() - .setState(PoliciesConfig.ACTIVE_POLICY_STATE) - .setType(PoliciesConfig.METADATA_POLICY_TYPE) - .setActors( - new DataHubActorFilter() - .setResourceOwnersTypes(new UrnArray(BUS_OWNER, TECH_OWNER)) - .setUsers(new UrnArray(List.of(TEST_USER_A, TEST_USER_B))) - .setGroups(new UrnArray(TEST_GROUP_A, TEST_GROUP_B))) - .setPrivileges( - new StringArray(List.of(PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType()))))); - assertEquals( - ESAccessControlUtil.buildAccessControlFilters(ownerWithMultiUserMultiGroupsBusTech), - Optional.of( - QueryBuilders.boolQuery() - .should( - QueryBuilders.boolQuery() - .should( - QueryBuilders.termsQuery( - "ownerTypes." 
- + OwnerTypeMap.encodeFieldName(BUS_OWNER.toString()) - + ".keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .should( - QueryBuilders.termsQuery( - "ownerTypes." - + OwnerTypeMap.encodeFieldName(TECH_OWNER.toString()) - + ".keyword", - List.of( - TEST_USER_A.toString().toLowerCase(), - TEST_GROUP_A.toString().toLowerCase(), - TEST_GROUP_C.toString().toLowerCase()))) - .minimumShouldMatch(1)) - .minimumShouldMatch(1)), - "Expected filter for business owner or technical owner by user A and group A (excluding other group B and owner privilege)"); + private static RestrictedService mockRestrictedService() { + RestrictedService mockRestrictedService = mock(RestrictedService.class); + when(mockRestrictedService.encryptRestrictedUrn(any())) + .thenAnswer( + args -> { + Urn urn = args.getArgument(0); + return UrnUtils.getUrn(urn.toString().replace("urn:li:dataset", "urn:li:restricted")); + }); + return mockRestrictedService; + } + + private static SearchResult mockSearchResult() { + SearchResult result = new SearchResult(); + result.setFrom(0); + result.setPageSize(10); + result.setNumEntities(1); + result.setEntities( + new SearchEntityArray( + new SearchEntity() + .setEntity(UNRESTRICTED_RESULT_URN) + .setMatchedFields(new MatchedFieldArray()))); + result.setMetadata(mock(SearchResultMetadata.class)); + return result; + } + + private static OperationContext sessionWithUserAGroupAandC(Set policies) + throws RemoteInvocationException, URISyntaxException { + return sessionWithUserGroups(USER_A_AUTH, policies, List.of(TEST_GROUP_A, TEST_GROUP_C)); + } + + private static OperationContext sessionWithUserBNoGroup(Set policies) + throws RemoteInvocationException, URISyntaxException { + return sessionWithUserGroups(USER_B_AUTH, policies, List.of()); } - private static OperationContext sessionWithPolicy(Set policies) { - return sessionWithPolicy(policies, List.of(TEST_GROUP_A, TEST_GROUP_C)); + private static OperationContext sessionWithUserGroups( + Authentication auth, Set policies, List groups) + throws RemoteInvocationException, URISyntaxException { + Urn actorUrn = UrnUtils.getUrn(auth.getActor().toUrnStr()); + Authorizer dataHubAuthorizer = + new TestDataHubAuthorizer(policies, Map.of(actorUrn, groups), TEST_OWNERSHIP); + return ENABLED_CONTEXT.asSession(RequestContext.TEST, dataHubAuthorizer, auth); } - private static OperationContext sessionWithPolicy( - Set policies, List groups) { - Authorizer mockAuthorizer = mock(Authorizer.class); - when(mockAuthorizer.getActorPolicies(eq(UrnUtils.getUrn(USER_AUTH.getActor().toUrnStr())))) - .thenReturn(policies); - when(mockAuthorizer.getActorGroups(eq(UrnUtils.getUrn(USER_AUTH.getActor().toUrnStr())))) - .thenReturn(groups); + public static class TestDataHubAuthorizer extends DataHubAuthorizer { + + public TestDataHubAuthorizer( + @Nonnull Set policies, + @Nonnull Map> userGroups, + @Nonnull Map>> resourceOwnerTypes) + throws RemoteInvocationException, URISyntaxException { + super( + ENABLED_CONTEXT, + mockUserGroupEntityClient(userGroups, resourceOwnerTypes), + 0, + 0, + AuthorizationMode.DEFAULT, + 0); + + DefaultEntitySpecResolver specResolver = + new DefaultEntitySpecResolver( + ENABLED_CONTEXT.getSystemAuthentication().get(), + mockUserGroupEntityClient(userGroups, resourceOwnerTypes)); + + AuthorizerContext ctx = mock(AuthorizerContext.class); + when(ctx.getEntitySpecResolver()).thenReturn(specResolver); + init(Map.of(), ctx); + + 
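+      // Seed the shared policy cache directly, keyed by privilege name; the refresh interval
+      // passed to the parent constructor above is 0, so the scheduled policy refresh never runs.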
readWriteLock.writeLock().lock(); + try { + policyCache.clear(); + Map> byPrivilegeName = + policies.stream() + .flatMap( + policy -> policy.getPrivileges().stream().map(priv -> Pair.of(priv, policy))) + .collect( + Collectors.groupingBy( + Pair::getKey, Collectors.mapping(Pair::getValue, Collectors.toList()))); + policyCache.putAll(byPrivilegeName); + } finally { + readWriteLock.writeLock().unlock(); + } + } + + private static EntityClient mockUserGroupEntityClient( + @Nonnull Map> userGroups, + @Nonnull Map>> resourceOwnerTypes) + throws RemoteInvocationException, URISyntaxException { + EntityClient mockEntityClient = mock(EntityClient.class); + when(mockEntityClient.batchGetV2(anyString(), anySet(), anySet(), any())) + .thenAnswer( + args -> { + String entityType = args.getArgument(0); + Set urns = args.getArgument(1); + Set aspectNames = args.getArgument(2); + + switch (entityType) { + case CORP_USER_ENTITY_NAME: + if (aspectNames.contains(GROUP_MEMBERSHIP_ASPECT_NAME)) { + return urns.stream() + .filter(userGroups::containsKey) + .map( + urn -> + Pair.of( + urn, + new EntityResponse() + .setUrn(urn) + .setEntityName(entityType) + .setAspects( + new EnvelopedAspectMap( + new EnvelopedAspectMap( + Map.of( + GROUP_MEMBERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setName(GROUP_MEMBERSHIP_ASPECT_NAME) + .setValue( + new Aspect( + new GroupMembership() + .setGroups( + new UrnArray( + userGroups.get( + urn))) + .data())))))))) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + } + return Map.of(); + case DATASET_ENTITY_NAME: + if (aspectNames.contains(OWNERSHIP_ASPECT_NAME)) { + return urns.stream() + .filter(resourceOwnerTypes::containsKey) + .map( + urn -> + Pair.of( + urn, + new EntityResponse() + .setUrn(urn) + .setEntityName(entityType) + .setAspects( + new EnvelopedAspectMap( + new EnvelopedAspectMap( + Map.of( + OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setName(OWNERSHIP_ASPECT_NAME) + .setValue( + new Aspect( + new Ownership() + .setOwners( + new OwnerArray( + resourceOwnerTypes + .get(urn) + .keySet() + .stream() + .flatMap( + ownerUrn -> + resourceOwnerTypes + .get( + urn) + .get( + ownerUrn) + .stream() + .map( + ownerTypeUrn -> + new Owner() + .setTypeUrn( + ownerTypeUrn) + .setOwner( + ownerUrn) + .setType( + OwnershipType + .CUSTOM))) + .collect( + Collectors + .toSet()))) + .data())))))))) + .collect(Collectors.toMap(Pair::getKey, Pair::getValue)); + } + return Map.of(); + default: + return Map.of(); + } + }); - return ENABLED_CONTEXT.asSession(mockAuthorizer, USER_AUTH); + // call batch interface above + when(mockEntityClient.getV2(anyString(), any(), anySet(), any())) + .thenAnswer( + args -> { + Urn entityUrn = args.getArgument(1); + Map batchResponse = + mockEntityClient.batchGetV2( + args.getArgument(0), + Set.of(entityUrn), + args.getArgument(2), + args.getArgument(3)); + return batchResponse.get(entityUrn); + }); + return mockEntityClient; + } } } diff --git a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java index c27a1c337ed5c..6159c8118ec51 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java +++ b/metadata-io/src/test/java/io/datahubproject/test/DataGenerator.java @@ -162,11 +162,13 @@ public Stream> generateMCPs( if (generateDefaultAspects) { // Expand with default aspects instead of relying on default generation return Stream.concat( - Stream.of(mcp), + // Remove duplicate key aspects (generated as default aspects) + 
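+              // (DefaultAspectsUtil.getAdditionalChanges below regenerates the key aspect as a
+              // default aspect, so dropping it here avoids emitting the same aspect twice)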
Stream.of(mcp).filter(m -> !m.getAspectName().endsWith("Key")), DefaultAspectsUtil.getAdditionalChanges( AspectsBatchImpl.builder() .mcps(List.of(mcp), auditStamp, entityService) - .build(), + .build() + .getMCPItems(), entityService, true) .stream() diff --git a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java index 5dceed80f6542..5a69db19f5c9e 100644 --- a/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java +++ b/metadata-io/src/test/java/io/datahubproject/test/search/SearchTestUtils.java @@ -100,7 +100,8 @@ public static SearchResult searchAcrossEntities( String query, Filter filter) { return searchService.searchAcrossEntities( - opContext.withSearchFlags(flags -> flags.setFulltext(true).setSkipCache(true)), + opContext.withSearchFlags( + flags -> flags.setFulltext(true).setSkipCache(true).setSkipHighlighting(false)), entityNames, query, filter, diff --git a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java index b409a41600bd7..35aae8c16c445 100644 --- a/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java +++ b/metadata-jobs/mae-consumer-job/src/test/java/com/linkedin/metadata/kafka/MaeConsumerApplicationTestConfiguration.java @@ -8,6 +8,8 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.search.elasticsearch.indexbuilder.EntityIndexBuilders; import com.linkedin.metadata.systemmetadata.ElasticSearchSystemMetadataService; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import io.ebean.Database; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; @@ -25,6 +27,10 @@ public class MaeConsumerApplicationTestConfiguration { @MockBean private EntityRegistry entityRegistry; + @MockBean private RestrictedService restrictedService; + + @MockBean private SecretService secretService; + @MockBean private GraphService _graphService; @MockBean private ElasticSearchSystemMetadataService _elasticSearchSystemMetadataService; diff --git a/metadata-jobs/mae-consumer/build.gradle b/metadata-jobs/mae-consumer/build.gradle index 2e068d5a3501e..4fc1944388970 100644 --- a/metadata-jobs/mae-consumer/build.gradle +++ b/metadata-jobs/mae-consumer/build.gradle @@ -16,7 +16,7 @@ dependencies { exclude group: 'org.neo4j.test' } implementation project(':metadata-service:auth-config') - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation project(':metadata-io') implementation project(':ingestion-scheduler') implementation project(':metadata-utils') diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java index 5b5a4ab072109..5cad4ca4b9aac 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/elasticsearch/ElasticsearchConnector.java @@ -27,7 +27,8 @@ 
public ElasticsearchConnector(ESBulkProcessor bulkProcessor, int numRetries) { public void feedElasticEvent(@Nonnull ElasticEvent event) { if (event.getActionType().equals(ChangeType.DELETE)) { _bulkProcessor.add(createDeleteRequest(event)); - } else if (event.getActionType().equals(ChangeType.CREATE)) { + } else if (event.getActionType().equals(ChangeType.CREATE) + || event.getActionType().equals(ChangeType.CREATE_ENTITY)) { _bulkProcessor.add(createIndexRequest(event)); } else if (event.getActionType().equals(ChangeType.UPDATE)) { _bulkProcessor.add(createUpsertRequest(event)); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java index cddfae227b619..1172193fdfe30 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/form/FormAssignmentHook.java @@ -52,7 +52,8 @@ public class FormAssignmentHook implements MetadataChangeLogHook { private static final Set SUPPORTED_UPDATE_TYPES = - ImmutableSet.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.RESTATE); + ImmutableSet.of( + ChangeType.UPSERT, ChangeType.CREATE, ChangeType.CREATE_ENTITY, ChangeType.RESTATE); private final FormService _formService; private final boolean _isEnabled; diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java index cc34884588979..658912b0203e9 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/incident/IncidentsSummaryHook.java @@ -51,7 +51,8 @@ public class IncidentsSummaryHook implements MetadataChangeLogHook { private static final Set SUPPORTED_UPDATE_TYPES = - ImmutableSet.of(ChangeType.UPSERT, ChangeType.CREATE, ChangeType.RESTATE); + ImmutableSet.of( + ChangeType.UPSERT, ChangeType.CREATE, ChangeType.CREATE_ENTITY, ChangeType.RESTATE); private static final Set SUPPORTED_UPDATE_ASPECTS = ImmutableSet.of(INCIDENT_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); diff --git a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java index 2019934e581fe..c2a5faa987d99 100644 --- a/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java +++ b/metadata-jobs/mae-consumer/src/main/java/com/linkedin/metadata/kafka/hook/ingestion/IngestionSchedulerHook.java @@ -89,6 +89,7 @@ private boolean isIngestionSourceUpdate(final MetadataChangeLog event) { return Constants.INGESTION_INFO_ASPECT_NAME.equals(event.getAspectName()) && (ChangeType.UPSERT.equals(event.getChangeType()) || ChangeType.CREATE.equals(event.getChangeType()) + || ChangeType.CREATE_ENTITY.equals(event.getChangeType()) || ChangeType.DELETE.equals(event.getChangeType())); } diff --git a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java index 
789918a4b164c..74c8f2511feaf 100644 --- a/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java +++ b/metadata-jobs/mae-consumer/src/test/java/com/linkedin/metadata/kafka/hook/spring/MCLSpringTestConfiguration.java @@ -22,6 +22,7 @@ import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.ServicesRegistryContext; import io.datahubproject.test.metadata.context.TestOperationContexts; import org.apache.avro.generic.GenericRecord; import org.springframework.beans.factory.annotation.Qualifier; @@ -94,8 +95,9 @@ public OperationContext operationContext( .thenReturn(TestOperationContexts.TEST_SYSTEM_AUTH.getActor()); return OperationContext.asSystem( OperationContextConfig.builder().build(), - entityRegistry, systemAuthentication, + entityRegistry, + mock(ServicesRegistryContext.class), indexConvention); } } diff --git a/metadata-jobs/mce-consumer-job/build.gradle b/metadata-jobs/mce-consumer-job/build.gradle index 3370838974bf7..49c69626c211c 100644 --- a/metadata-jobs/mce-consumer-job/build.gradle +++ b/metadata-jobs/mce-consumer-job/build.gradle @@ -38,6 +38,7 @@ dependencies { annotationProcessor externalDependency.lombok testImplementation project(':metadata-dao-impl:kafka-producer') + testImplementation project(':metadata-service:restli-client') testImplementation externalDependency.springBootTest testImplementation externalDependency.mockito testImplementation externalDependency.testng diff --git a/metadata-jobs/mce-consumer/build.gradle b/metadata-jobs/mce-consumer/build.gradle index 49604924acb68..17ec825712471 100644 --- a/metadata-jobs/mce-consumer/build.gradle +++ b/metadata-jobs/mce-consumer/build.gradle @@ -21,7 +21,7 @@ dependencies { implementation project(':metadata-events:mxe-registration') implementation project(':metadata-events:mxe-utils-avro') implementation project(':metadata-io') - implementation project(':metadata-service:restli-client') + implementation project(':metadata-service:restli-client-api') implementation spec.product.pegasus.restliClient implementation spec.product.pegasus.restliCommon implementation externalDependency.elasticSearchRest diff --git a/metadata-models/src/main/pegasus/com/linkedin/events/metadata/ChangeType.pdl b/metadata-models/src/main/pegasus/com/linkedin/events/metadata/ChangeType.pdl index 4e7ac6788cc7c..55e8977f8c4a9 100644 --- a/metadata-models/src/main/pegasus/com/linkedin/events/metadata/ChangeType.pdl +++ b/metadata-models/src/main/pegasus/com/linkedin/events/metadata/ChangeType.pdl @@ -10,7 +10,6 @@ enum ChangeType { UPSERT /** - * NOT SUPPORTED YET * insert if not exists. otherwise fail */ CREATE @@ -22,13 +21,11 @@ enum ChangeType { UPDATE /** - * NOT SUPPORTED YET * delete action */ DELETE /** - * NOT SUPPORTED YET * patch the changes instead of full replace */ PATCH @@ -37,4 +34,9 @@ enum ChangeType { * Restate an aspect, eg. in a index refresh. */ RESTATE + + /** + * insert if entity not exists. 
otherwise fail + */ + CREATE_ENTITY } \ No newline at end of file diff --git a/metadata-models/src/main/resources/entity-registry.yml b/metadata-models/src/main/resources/entity-registry.yml index b335c66cc0bb7..55b96555e02b9 100644 --- a/metadata-models/src/main/resources/entity-registry.yml +++ b/metadata-models/src/main/resources/entity-registry.yml @@ -576,22 +576,22 @@ plugins: - className: 'com.linkedin.metadata.aspect.validation.StructuredPropertiesValidator' enabled: true supportedOperations: + - CREATE - UPSERT supportedEntityAspectNames: - entityName: '*' aspectName: structuredProperties - mutationHooks: - - className: 'com.linkedin.metadata.aspect.hooks.StructuredPropertiesSoftDelete' + - className: 'com.linkedin.metadata.aspect.validation.CreateIfNotExistsValidator' enabled: true + supportedOperations: + - CREATE + - CREATE_ENTITY supportedEntityAspectNames: - entityName: '*' - aspectName: structuredProperties - - className: 'com.linkedin.metadata.aspect.hooks.OwnerTypeMap' + aspectName: '*' + mutationHooks: + - className: 'com.linkedin.metadata.aspect.hooks.StructuredPropertiesSoftDelete' enabled: true - supportedOperations: - - UPSERT - - CREATE - - RESTATE supportedEntityAspectNames: - entityName: '*' - aspectName: ownership \ No newline at end of file + aspectName: structuredProperties diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EmptyContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EmptyContext.java new file mode 100644 index 0000000000000..b9e598b69d944 --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/EmptyContext.java @@ -0,0 +1,12 @@ +package io.datahubproject.metadata.context; + +import java.util.Optional; + +public class EmptyContext implements ContextInterface { + public static final EmptyContext EMPTY = new EmptyContext(); + + @Override + public Optional getCacheKeyComponent() { + return Optional.empty(); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java index d2c038c26e325..df1401bb68306 100644 --- a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContext.java @@ -51,6 +51,7 @@ public class OperationContext { */ public static OperationContext asSession( OperationContext systemOperationContext, + @Nonnull RequestContext requestContext, @Nonnull Authorizer authorizer, @Nonnull Authentication sessionAuthentication, boolean allowSystemAuthentication) { @@ -61,6 +62,9 @@ public static OperationContext asSession( .allowSystemAuthentication(allowSystemAuthentication) .build()) .authorizerContext(AuthorizerContext.builder().authorizer(authorizer).build()) + .requestContext(requestContext) + // Initialize view authorization for user viewable urn tracking + .viewAuthorizationContext(ViewAuthorizationContext.builder().build()) .build(sessionAuthentication); } @@ -92,23 +96,29 @@ public static OperationContext withSearchFlags( */ public static OperationContext asSystem( @Nonnull OperationContextConfig config, - @Nonnull EntityRegistry entityRegistry, @Nonnull Authentication systemAuthentication, - @Nonnull IndexConvention indexConvention) { + @Nullable EntityRegistry entityRegistry, + @Nullable ServicesRegistryContext servicesRegistryContext, 
+ @Nullable IndexConvention indexConvention) { ActorContext systemActorContext = ActorContext.builder().systemAuth(true).authentication(systemAuthentication).build(); OperationContextConfig systemConfig = config.toBuilder().allowSystemAuthentication(true).build(); SearchContext systemSearchContext = - SearchContext.builder().indexConvention(indexConvention).build(); + indexConvention == null + ? SearchContext.EMPTY + : SearchContext.builder().indexConvention(indexConvention).build(); return OperationContext.builder() .operationContextConfig(systemConfig) .systemActorContext(systemActorContext) .searchContext(systemSearchContext) .entityRegistryContext( - EntityRegistryContext.builder().entityRegistry(entityRegistry).build()) + entityRegistry == null + ? null + : EntityRegistryContext.builder().entityRegistry(entityRegistry).build()) + .servicesRegistryContext(servicesRegistryContext) // Authorizer.EMPTY doesn't actually apply to system auth .authorizerContext(AuthorizerContext.builder().authorizer(Authorizer.EMPTY).build()) .build(systemAuthentication); @@ -119,7 +129,10 @@ public static OperationContext asSystem( @Nullable private final ActorContext systemActorContext; @Nonnull private final SearchContext searchContext; @Nonnull private final AuthorizerContext authorizerContext; - @Nonnull private final EntityRegistryContext entityRegistryContext; + @Nullable private final EntityRegistryContext entityRegistryContext; + @Nullable private final ServicesRegistryContext servicesRegistryContext; + @Nullable private final RequestContext requestContext; + @Nullable private final ViewAuthorizationContext viewAuthorizationContext; public OperationContext withSearchFlags( @Nonnull Function flagDefaults) { @@ -127,17 +140,22 @@ public OperationContext withSearchFlags( } public OperationContext asSession( - @Nonnull Authorizer authorizer, @Nonnull Authentication sessionAuthentication) { + @Nonnull RequestContext requestContext, + @Nonnull Authorizer authorizer, + @Nonnull Authentication sessionAuthentication) { return OperationContext.asSession( this, + requestContext, authorizer, sessionAuthentication, getOperationContextConfig().isAllowSystemAuthentication()); } - @Nonnull + @Nullable public EntityRegistry getEntityRegistry() { - return getEntityRegistryContext().getEntityRegistry(); + return Optional.ofNullable(getEntityRegistryContext()) + .map(EntityRegistryContext::getEntityRegistry) + .orElse(null); } /** @@ -201,6 +219,10 @@ public AuditStamp getAuditStamp() { return getAuditStamp(null); } + public Optional getViewAuthorizationContext() { + return Optional.ofNullable(viewAuthorizationContext); + } + /** * Return a unique id for this context. Typically useful for building cache keys. We combine the * different context components to create a single string representation of the hashcode across @@ -216,9 +238,21 @@ public String getGlobalContextId() { ImmutableSet.builder() .add(getOperationContextConfig()) .add(getAuthorizerContext()) - .add(getActorContext()) + .add(getSessionActorContext()) .add(getSearchContext()) - .add(getEntityRegistryContext()) + .add( + getEntityRegistryContext() == null + ? EmptyContext.EMPTY + : getEntityRegistryContext()) + .add( + getServicesRegistryContext() == null + ? EmptyContext.EMPTY + : getServicesRegistryContext()) + .add(getRequestContext() == null ? EmptyContext.EMPTY : getRequestContext()) + .add( + getViewAuthorizationContext().isPresent() + ? 
getViewAuthorizationContext().get() + : EmptyContext.EMPTY) .build() .stream() .map(ContextInterface::getCacheKeyComponent) @@ -232,9 +266,16 @@ public String getSearchContextId() { return String.valueOf( ImmutableSet.builder() .add(getOperationContextConfig()) - .add(getActorContext()) + .add(getSessionActorContext()) .add(getSearchContext()) - .add(getEntityRegistryContext()) + .add( + getEntityRegistryContext() == null + ? EmptyContext.EMPTY + : getEntityRegistryContext()) + .add( + getServicesRegistryContext() == null + ? EmptyContext.EMPTY + : getServicesRegistryContext()) .build() .stream() .map(ContextInterface::getCacheKeyComponent) @@ -248,8 +289,15 @@ public String getEntityContextId() { return String.valueOf( ImmutableSet.builder() .add(getOperationContextConfig()) - .add(getActorContext()) - .add(getEntityRegistryContext()) + .add(getSessionActorContext()) + .add( + getEntityRegistryContext() == null + ? EmptyContext.EMPTY + : getEntityRegistryContext()) + .add( + getServicesRegistryContext() == null + ? EmptyContext.EMPTY + : getServicesRegistryContext()) .build() .stream() .map(ContextInterface::getCacheKeyComponent) @@ -268,14 +316,20 @@ public OperationContext build(@Nonnull Authentication sessionAuthentication) { .authentication(sessionAuthentication) .systemAuth( this.systemActorContext != null - && this.systemActorContext.getAuthentication().equals(sessionAuthentication)) + && this.systemActorContext + .getAuthentication() + .getActor() + .equals(sessionAuthentication.getActor())) .policyInfoSet(this.authorizerContext.getAuthorizer().getActorPolicies(actorUrn)) .groupMembership(this.authorizerContext.getAuthorizer().getActorGroups(actorUrn)) .build(), this.systemActorContext, Objects.requireNonNull(this.searchContext), Objects.requireNonNull(this.authorizerContext), - Objects.requireNonNull(this.entityRegistryContext)); + this.entityRegistryContext, + this.servicesRegistryContext, + this.requestContext, + this.viewAuthorizationContext); } private OperationContext build() { diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java index f0e12f5a0ce2b..121e5530605f2 100644 --- a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/OperationContextConfig.java @@ -1,6 +1,6 @@ package io.datahubproject.metadata.context; -import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.authorization.config.ViewAuthorizationConfiguration; import java.util.Optional; import lombok.Builder; import lombok.Getter; @@ -15,10 +15,10 @@ public class OperationContextConfig implements ContextInterface { private final boolean allowSystemAuthentication; /** Configuration for search authorization */ - private final SearchAuthorizationConfiguration searchAuthorizationConfiguration; + private final ViewAuthorizationConfiguration viewAuthorizationConfiguration; @Override public Optional getCacheKeyComponent() { - return Optional.of(searchAuthorizationConfiguration.hashCode()); + return Optional.of(viewAuthorizationConfiguration.hashCode()); } } diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/RequestContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/RequestContext.java new file mode 100644 
index 0000000000000..83090b9235701 --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/RequestContext.java @@ -0,0 +1,88 @@ +package io.datahubproject.metadata.context; + +import java.util.Arrays; +import java.util.Collection; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import java.util.Set; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.Builder; +import lombok.Getter; + +@Getter +@Builder +public class RequestContext implements ContextInterface { + @Nonnull + public static final RequestContext TEST = + RequestContext.builder().requestID("test").requestAPI(RequestAPI.TEST).build(); + + @Nonnull private final RequestAPI requestAPI; + + /** + * i.e. graphql query name or OpenAPI operation id, etc. Intended use case is for log messages and + * monitoring + */ + @Nonnull private final String requestID; + + @Override + public Optional getCacheKeyComponent() { + return Optional.empty(); + } + + public static class RequestContextBuilder { + private RequestContext build() { + return new RequestContext(this.requestAPI, this.requestID); + } + + public RequestContext buildGraphql(@Nonnull String queryName, Map variables) { + requestAPI(RequestAPI.GRAPHQL); + requestID(buildRequestId(queryName, Set.of())); + return build(); + } + + public RequestContext buildRestli(String action, @Nullable String entityName) { + return buildRestli(action, entityName == null ? null : List.of(entityName)); + } + + public RequestContext buildRestli(@Nonnull String action, @Nullable String[] entityNames) { + return buildRestli( + action, + entityNames == null ? null : Arrays.stream(entityNames).collect(Collectors.toList())); + } + + public RequestContext buildRestli(String action, @Nullable Collection entityNames) { + requestAPI(RequestAPI.RESTLI); + requestID(buildRequestId(action, entityNames)); + return build(); + } + + public RequestContext buildOpenapi(@Nonnull String action, @Nullable String entityName) { + return buildOpenapi(action, entityName == null ? null : List.of(entityName)); + } + + public RequestContext buildOpenapi( + @Nonnull String action, @Nullable Collection entityNames) { + requestAPI(RequestAPI.OPENAPI); + requestID(buildRequestId(action, entityNames)); + return build(); + } + + private static String buildRequestId( + @Nonnull String action, @Nullable Collection entityNames) { + return entityNames == null || entityNames.isEmpty() + ? 
action + : String.format( + "%s(%s)", action, entityNames.stream().distinct().collect(Collectors.toList())); + } + } + + public enum RequestAPI { + TEST, + RESTLI, + OPENAPI, + GRAPHQL + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ServicesRegistryContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ServicesRegistryContext.java new file mode 100644 index 0000000000000..8149d2abe163c --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ServicesRegistryContext.java @@ -0,0 +1,19 @@ +package io.datahubproject.metadata.context; + +import io.datahubproject.metadata.services.RestrictedService; +import java.util.Optional; +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Getter; + +@Getter +@Builder +public class ServicesRegistryContext implements ContextInterface { + + @Nonnull private final RestrictedService restrictedService; + + @Override + public Optional getCacheKeyComponent() { + return Optional.empty(); + } +} diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ViewAuthorizationContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ViewAuthorizationContext.java new file mode 100644 index 0000000000000..5204d7bf5f98f --- /dev/null +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/ViewAuthorizationContext.java @@ -0,0 +1,38 @@ +package io.datahubproject.metadata.context; + +import com.linkedin.common.urn.Urn; +import java.util.Collection; +import java.util.Optional; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; +import javax.annotation.Nonnull; +import lombok.Builder; +import lombok.Getter; + +@Getter +@Builder +public class ViewAuthorizationContext implements ContextInterface { + + /** + * Graphql has a lot of redundant `canView` authorization checks, to reduce the repeated checks + * for view authorization, maintain a list of urns that have already been identified as viewable + * for the request. 
+ */ + @Nonnull @Builder.Default private Set viewableUrns = ConcurrentHashMap.newKeySet(); + + public boolean canView(@Nonnull Collection urns) { + if (urns.isEmpty()) { + return false; + } + return viewableUrns.containsAll(urns); + } + + public void addViewableUrns(@Nonnull Collection urns) { + viewableUrns.addAll(urns); + } + + @Override + public Optional getCacheKeyComponent() { + return Optional.empty(); + } +} diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RestrictedService.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/services/RestrictedService.java similarity index 56% rename from metadata-service/services/src/main/java/com/linkedin/metadata/service/RestrictedService.java rename to metadata-operation-context/src/main/java/io/datahubproject/metadata/services/RestrictedService.java index d1aa8e9f5dbb5..5934892248d4d 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RestrictedService.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/services/RestrictedService.java @@ -1,22 +1,22 @@ -package com.linkedin.metadata.service; +package io.datahubproject.metadata.services; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.secret.SecretService; import javax.annotation.Nonnull; public class RestrictedService { + public static final String RESTRICTED_ENTITY_TYPE = "restricted"; - private final SecretService _secretService; + private final SecretService secretService; public RestrictedService(@Nonnull SecretService secretService) { - this._secretService = secretService; + this.secretService = secretService; } public Urn encryptRestrictedUrn(@Nonnull final Urn entityUrn) { - final String encryptedEntityUrn = this._secretService.encrypt(entityUrn.toString()); + final String encryptedEntityUrn = this.secretService.encrypt(entityUrn.toString()); try { - return new Urn("restricted", encryptedEntityUrn); + return new Urn(RESTRICTED_ENTITY_TYPE, encryptedEntityUrn); } catch (Exception e) { throw new RuntimeException("Error when creating restricted entity urn", e); } @@ -24,6 +24,6 @@ public Urn encryptRestrictedUrn(@Nonnull final Urn entityUrn) { public Urn decryptRestrictedUrn(@Nonnull final Urn restrictedUrn) { final String encryptedUrn = restrictedUrn.getId(); - return UrnUtils.getUrn(this._secretService.decrypt(encryptedUrn)); + return UrnUtils.getUrn(this.secretService.decrypt(encryptedUrn)); } } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/services/SecretService.java similarity index 98% rename from metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java rename to metadata-operation-context/src/main/java/io/datahubproject/metadata/services/SecretService.java index a735374b54858..bea03235abfb4 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/secret/SecretService.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/services/SecretService.java @@ -1,4 +1,4 @@ -package com.linkedin.metadata.secret; +package io.datahubproject.metadata.services; import java.io.ByteArrayOutputStream; import java.io.IOException; diff --git a/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java 
b/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java index 88e2f7f04ca5a..63e3406a877fb 100644 --- a/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/test/metadata/context/TestOperationContexts.java @@ -3,14 +3,14 @@ import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; -import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.authorization.config.ViewAuthorizationConfiguration; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; -import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.RequestContext; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -23,46 +23,69 @@ public class TestOperationContexts { new Authentication(new Actor(ActorType.USER, "testSystemUser"), ""); public static final Authentication TEST_USER_AUTH = new Authentication(new Actor(ActorType.USER, "datahub"), ""); - public static final IndexConvention TEST_EMPTY_INDEX_CONVENTION = IndexConventionImpl.NO_PREFIX; public static OperationContext systemContextNoSearchAuthorization( - @Nonnull EntityRegistry entityRegistry) { + @Nullable EntityRegistry entityRegistry) { return systemContextNoSearchAuthorization(entityRegistry, null); } + public static OperationContext systemContextNoSearchAuthorization() { + return systemContextNoSearchAuthorization(null, null); + } + public static OperationContext systemContextNoSearchAuthorization( - @Nonnull EntityRegistry entityRegistry, @Nullable IndexConvention indexConvention) { + @Nullable EntityRegistry entityRegistry, @Nullable IndexConvention indexConvention) { return OperationContext.asSystem( OperationContextConfig.builder() - .searchAuthorizationConfiguration( - SearchAuthorizationConfiguration.builder().enabled(false).build()) + .viewAuthorizationConfiguration( + ViewAuthorizationConfiguration.builder().enabled(false).build()) .build(), - entityRegistry, TEST_SYSTEM_AUTH, - indexConvention != null ? 
indexConvention : TEST_EMPTY_INDEX_CONVENTION); + entityRegistry, + null, + indexConvention); + } + + public static OperationContext userContextNoSearchAuthorization( + @Nullable EntityRegistry entityRegistry) { + return userContextNoSearchAuthorization(Authorizer.EMPTY, TEST_USER_AUTH, entityRegistry); + } + + public static OperationContext userContextNoSearchAuthorization( + @Nonnull Authorizer authorizer, @Nonnull Urn userUrn) { + return userContextNoSearchAuthorization(authorizer, userUrn, null); + } + + public static OperationContext userContextNoSearchAuthorization(@Nonnull Urn userUrn) { + return userContextNoSearchAuthorization(Authorizer.EMPTY, userUrn, null); } public static OperationContext userContextNoSearchAuthorization( - @Nonnull EntityRegistry entityRegistry, @Nonnull Urn userUrn) { - return userContextNoSearchAuthorization(entityRegistry, Authorizer.EMPTY, userUrn); + @Nonnull Urn userUrn, @Nullable EntityRegistry entityRegistry) { + return userContextNoSearchAuthorization(Authorizer.EMPTY, userUrn, entityRegistry); } public static OperationContext userContextNoSearchAuthorization( - @Nonnull EntityRegistry entityRegistry, @Nonnull Authorizer authorizer, - @Nonnull Urn userUrn) { + @Nonnull Urn userUrn, + @Nullable EntityRegistry entityRegistry) { return userContextNoSearchAuthorization( - entityRegistry, authorizer, - new Authentication(new Actor(ActorType.USER, userUrn.getId()), "")); + new Authentication(new Actor(ActorType.USER, userUrn.getId()), ""), + entityRegistry); + } + + public static OperationContext userContextNoSearchAuthorization( + @Nonnull Authorizer authorizer, @Nonnull Authentication sessionAuthorization) { + return userContextNoSearchAuthorization(authorizer, sessionAuthorization, null); } public static OperationContext userContextNoSearchAuthorization( - @Nonnull EntityRegistry entityRegistry, @Nonnull Authorizer authorizer, - @Nonnull Authentication sessionAuthorization) { + @Nonnull Authentication sessionAuthorization, + @Nullable EntityRegistry entityRegistry) { return systemContextNoSearchAuthorization(entityRegistry) - .asSession(authorizer, sessionAuthorization); + .asSession(RequestContext.TEST, authorizer, sessionAuthorization); } private TestOperationContexts() {} diff --git a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java index 81583deba0e6c..0e42fa173e96d 100644 --- a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java +++ b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/OperationContextTest.java @@ -8,7 +8,6 @@ import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.utils.elasticsearch.IndexConventionImpl; import org.testng.annotations.Test; public class OperationContextTest { @@ -22,11 +21,13 @@ public void testSystemPrivilegeEscalation() { OperationContext systemOpContext = OperationContext.asSystem( OperationContextConfig.builder().build(), - mock(EntityRegistry.class), systemAuth, - IndexConventionImpl.NO_PREFIX); + mock(EntityRegistry.class), + mock(ServicesRegistryContext.class), + null); - OperationContext opContext = systemOpContext.asSession(Authorizer.EMPTY, userAuth); + OperationContext opContext = + systemOpContext.asSession(RequestContext.TEST, 
Authorizer.EMPTY, userAuth); assertEquals( opContext.getAuthentication(), systemAuth, "Expected system authentication when allowed"); diff --git a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java index aefa4d17b42c9..139be5cca8856 100644 --- a/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java +++ b/metadata-service/auth-config/src/main/java/com/datahub/authorization/AuthorizationConfiguration.java @@ -1,6 +1,6 @@ package com.datahub.authorization; -import com.datahub.authorization.config.SearchAuthorizationConfiguration; +import com.datahub.authorization.config.ViewAuthorizationConfiguration; import com.datahub.plugins.auth.authorization.Authorizer; import java.util.List; import lombok.Data; @@ -14,5 +14,5 @@ public class AuthorizationConfiguration { /** List of configurations for {@link Authorizer}s to be registered */ private List authorizers; - private SearchAuthorizationConfiguration search; + private ViewAuthorizationConfiguration view; } diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java index 731cf08185384..023bbdf0cb35c 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/invite/InviteTokenService.java @@ -17,10 +17,10 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.services.SecretService; import java.net.URISyntaxException; import java.util.Collections; import javax.annotation.Nonnull; diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java index ff46642827b30..2ee33c0f2554d 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authentication/user/NativeUserService.java @@ -13,9 +13,9 @@ import com.linkedin.identity.CorpUserInfo; import com.linkedin.identity.CorpUserStatus; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; +import io.datahubproject.metadata.services.SecretService; import java.time.Instant; import java.util.Base64; import java.util.Objects; diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java index b5c6910776e52..0aeb9c100db99 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/DataHubAuthorizer.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import 
java.util.Collection; import java.util.HashMap; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Objects; @@ -52,17 +53,17 @@ public enum AuthorizationMode { // Maps privilege name to the associated set of policies for fast access. // Not concurrent data structure because writes are always against the entire thing. - private final Map> _policyCache = + protected final Map> policyCache = new HashMap<>(); // Shared Policy Cache. - private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); - private final Lock readLock = readWriteLock.readLock(); + protected final ReadWriteLock readWriteLock = new ReentrantReadWriteLock(); + protected final Lock readLock = readWriteLock.readLock(); - private final ScheduledExecutorService _refreshExecutorService = + private final ScheduledExecutorService refreshExecutorService = Executors.newScheduledThreadPool(1); - private final PolicyRefreshRunnable _policyRefreshRunnable; - private final PolicyEngine _policyEngine; - private EntitySpecResolver _entitySpecResolver; - private AuthorizationMode _mode; + private final PolicyRefreshRunnable policyRefreshRunnable; + private final PolicyEngine policyEngine; + private EntitySpecResolver entitySpecResolver; + private AuthorizationMode mode; private final OperationContext systemOpContext; public static final String ALL = "ALL"; @@ -75,24 +76,28 @@ public DataHubAuthorizer( final AuthorizationMode mode, final int policyFetchSize) { this.systemOpContext = systemOpContext; - _mode = Objects.requireNonNull(mode); - _policyEngine = + this.mode = Objects.requireNonNull(mode); + policyEngine = new PolicyEngine(systemOpContext.getAuthentication(), Objects.requireNonNull(entityClient)); - _policyRefreshRunnable = - new PolicyRefreshRunnable( - systemOpContext, - new PolicyFetcher(entityClient), - _policyCache, - readWriteLock.writeLock(), - policyFetchSize); - _refreshExecutorService.scheduleAtFixedRate( - _policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); + if (refreshIntervalSeconds > 0) { + policyRefreshRunnable = + new PolicyRefreshRunnable( + systemOpContext, + new PolicyFetcher(entityClient), + policyCache, + readWriteLock.writeLock(), + policyFetchSize); + refreshExecutorService.scheduleAtFixedRate( + policyRefreshRunnable, delayIntervalSeconds, refreshIntervalSeconds, TimeUnit.SECONDS); + } else { + policyRefreshRunnable = null; + } } @Override public void init(@Nonnull Map authorizerConfig, @Nonnull AuthorizerContext ctx) { // Pass. No static config. - _entitySpecResolver = Objects.requireNonNull(ctx.getEntitySpecResolver()); + entitySpecResolver = Objects.requireNonNull(ctx.getEntitySpecResolver()); } public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request) { @@ -103,11 +108,13 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request } Optional resolvedResourceSpec = - request.getResourceSpec().map(_entitySpecResolver::resolve); + request.getResourceSpec().map(entitySpecResolver::resolve); // 1. Fetch the policies relevant to the requested privilege. final List policiesToEvaluate = - getOrDefault(request.getPrivilege(), new ArrayList<>()); + new LinkedList<>(getOrDefault(request.getPrivilege(), new ArrayList<>())); + policiesToEvaluate.addAll( + PoliciesConfig.getDefaultPolicies(UrnUtils.getUrn(request.getActorUrn()))); // 2. Evaluate each policy. 
for (DataHubPolicyInfo policy : policiesToEvaluate) { @@ -124,35 +131,41 @@ public AuthorizationResult authorize(@Nonnull final AuthorizationRequest request public List getGrantedPrivileges( final String actor, final Optional resourceSpec) { - // 1. Fetch all policies - final List policiesToEvaluate = getOrDefault(ALL, new ArrayList<>()); Urn actorUrn = UrnUtils.getUrn(actor); + + // 1. Fetch all policies + final List policiesToEvaluate = + new LinkedList<>(getOrDefault(ALL, new ArrayList<>())); + policiesToEvaluate.addAll(PoliciesConfig.getDefaultPolicies(actorUrn)); + final ResolvedEntitySpec resolvedActorSpec = - _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); + entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actor)); Optional resolvedResourceSpec = - resourceSpec.map(_entitySpecResolver::resolve); + resourceSpec.map(entitySpecResolver::resolve); - return _policyEngine.getGrantedPrivileges( + return policyEngine.getGrantedPrivileges( policiesToEvaluate, resolvedActorSpec, resolvedResourceSpec); } @Override public Set getActorPolicies(@Nonnull Urn actorUrn) { // 1. Fetch all policies - final List policiesToEvaluate = getOrDefault(ALL, new ArrayList<>()); + final List policiesToEvaluate = + new LinkedList<>(getOrDefault(ALL, new ArrayList<>())); + policiesToEvaluate.addAll(PoliciesConfig.getDefaultPolicies(actorUrn)); // 2. Actor identity final ResolvedEntitySpec resolvedActorSpec = - _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actorUrn.toString())); + entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actorUrn.toString())); return policiesToEvaluate.stream() .filter(policy -> PoliciesConfig.ACTIVE_POLICY_STATE.equals(policy.getState())) .filter( policy -> - policy.getActors().isResourceOwners() - || _policyEngine.isActorMatch( + (policy.getActors() != null && policy.getActors().isResourceOwners()) + || policyEngine.isActorMatch( resolvedActorSpec, policy.getActors(), Optional.empty(), @@ -164,7 +177,7 @@ public Set getActorPolicies(@Nonnull Urn actorUrn) { public Collection getActorGroups(@Nonnull Urn actorUrn) { // 1. Actor identity final ResolvedEntitySpec resolvedActorSpec = - _entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actorUrn.toString())); + entitySpecResolver.resolve(new EntitySpec(actorUrn.getEntityType(), actorUrn.toString())); return resolvedActorSpec.getGroupMembership().stream() .map(UrnUtils::getUrn) @@ -194,7 +207,7 @@ public AuthorizedActors authorizedActors( final List policiesToEvaluate = getOrDefault(privilege, new ArrayList<>()); Optional resolvedResourceSpec = - resourceSpec.map(_entitySpecResolver::resolve); + resourceSpec.map(entitySpecResolver::resolve); // Step 2: For each policy, determine whether the resource is a match. for (DataHubPolicyInfo policy : policiesToEvaluate) { @@ -204,7 +217,7 @@ public AuthorizedActors authorizedActors( } final PolicyEngine.PolicyActors matchingActors = - _policyEngine.getMatchingActors(policy, resolvedResourceSpec); + policyEngine.getMatchingActors(policy, resolvedResourceSpec); // Step 3: For each matching policy, add actors that are authorized. authorizedUsers.addAll(matchingActors.getUsers()); @@ -228,15 +241,17 @@ public AuthorizedActors authorizedActors( * created, modified, or deleted. 
*/ public void invalidateCache() { - _refreshExecutorService.execute(_policyRefreshRunnable); + if (policyRefreshRunnable != null) { + refreshExecutorService.execute(policyRefreshRunnable); + } } public AuthorizationMode mode() { - return _mode; + return mode; } public void setMode(final AuthorizationMode mode) { - _mode = mode; + this.mode = mode; } /** @@ -263,10 +278,10 @@ private boolean isRequestGranted( } final ResolvedEntitySpec resolvedActorSpec = - _entitySpecResolver.resolve( + entitySpecResolver.resolve( new EntitySpec(actorUrn.get().getEntityType(), request.getActorUrn())); final PolicyEngine.PolicyEvaluationResult result = - _policyEngine.evaluatePolicy( + policyEngine.evaluatePolicy( policy, resolvedActorSpec, request.getPrivilege(), resourceSpec); return result.isGranted(); } @@ -286,7 +301,7 @@ private Optional getUrnFromRequestActor(String actor) { private List getOrDefault(String key, List defaultValue) { readLock.lock(); try { - return _policyCache.getOrDefault(key, defaultValue); + return policyCache.getOrDefault(key, defaultValue); } finally { // To unlock the acquired read thread readLock.unlock(); @@ -304,8 +319,8 @@ private List getOrDefault(String key, List static class PolicyRefreshRunnable implements Runnable { private final OperationContext systemOpContext; - private final PolicyFetcher _policyFetcher; - private final Map> _policyCache; + private final PolicyFetcher policyFetcher; + private final Map> policyCache; private final Lock writeLock; private final int count; @@ -320,7 +335,7 @@ public void run() { while (total == null || scrollId != null) { try { final PolicyFetcher.PolicyFetchResult policyFetchResult = - _policyFetcher.fetchPolicies(systemOpContext, count, scrollId, null); + policyFetcher.fetchPolicies(systemOpContext, count, scrollId, null); addPoliciesToCache(newCache, policyFetchResult.getPolicies()); @@ -338,8 +353,8 @@ public void run() { writeLock.lock(); try { - _policyCache.clear(); - _policyCache.putAll(newCache); + policyCache.clear(); + policyCache.putAll(newCache); } finally { // To unlock the acquired write thread writeLock.unlock(); diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java index 13c50059031b6..3f74be6ff4ba6 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/authorization/PolicyEngine.java @@ -36,6 +36,7 @@ import java.util.Set; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.AccessLevel; import lombok.AllArgsConstructor; @@ -326,30 +327,36 @@ private boolean isOwnerMatch( return isActorOwner(resolvedActorSpec, requestResource.get(), ownershipTypes, context); } - private Set getOwnersForType(EntitySpec resourceSpec, List ownershipTypes) { - Urn entityUrn = UrnUtils.getUrn(resourceSpec.getEntity()); - EnvelopedAspect ownershipAspect; - try { - EntityResponse response = - _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), - _systemAuthentication); - if (response == null || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { + private Set getOwnersForType( + @Nonnull EntitySpec resourceSpec, @Nonnull List ownershipTypes) { + if (resourceSpec.getEntity().isEmpty()) { + return Set.of(); + } else { + Urn 
entityUrn = UrnUtils.getUrn(resourceSpec.getEntity()); + EnvelopedAspect ownershipAspect; + try { + EntityResponse response = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(Constants.OWNERSHIP_ASPECT_NAME), + _systemAuthentication); + if (response == null + || !response.getAspects().containsKey(Constants.OWNERSHIP_ASPECT_NAME)) { + return Collections.emptySet(); + } + ownershipAspect = response.getAspects().get(Constants.OWNERSHIP_ASPECT_NAME); + } catch (Exception e) { + log.error("Error while retrieving ownership aspect for urn {}", entityUrn, e); return Collections.emptySet(); } - ownershipAspect = response.getAspects().get(Constants.OWNERSHIP_ASPECT_NAME); - } catch (Exception e) { - log.error("Error while retrieving ownership aspect for urn {}", entityUrn, e); - return Collections.emptySet(); - } - Ownership ownership = new Ownership(ownershipAspect.getValue().data()); - Stream ownersStream = ownership.getOwners().stream(); - if (ownershipTypes != null) { - ownersStream = ownersStream.filter(owner -> ownershipTypes.contains(owner.getTypeUrn())); + Ownership ownership = new Ownership(ownershipAspect.getValue().data()); + Stream ownersStream = ownership.getOwners().stream(); + if (ownershipTypes != null) { + ownersStream = ownersStream.filter(owner -> ownershipTypes.contains(owner.getTypeUrn())); + } + return ownersStream.map(owner -> owner.getOwner().toString()).collect(Collectors.toSet()); } - return ownersStream.map(owner -> owner.getOwner().toString()).collect(Collectors.toSet()); } private boolean isActorOwner( diff --git a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java index de2c18782d3d8..ccdc9e3732d91 100644 --- a/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java +++ b/metadata-service/auth-impl/src/main/java/com/datahub/telemetry/TrackingService.java @@ -13,11 +13,11 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.version.GitVersion; import com.linkedin.telemetry.TelemetryClientId; import com.mixpanel.mixpanelapi.MessageBuilder; import com.mixpanel.mixpanelapi.MixpanelAPI; +import io.datahubproject.metadata.services.SecretService; import java.io.IOException; import java.util.HashSet; import java.util.Set; diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java index 775039766a2c9..36f2368523a7f 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/invite/InviteTokenServiceTest.java @@ -17,8 +17,8 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.secret.SecretService; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.services.SecretService; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java 
b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java index 60b10e3c53ef4..70f8e779bb92e 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authentication/user/NativeUserServiceTest.java @@ -13,7 +13,7 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.identity.CorpUserCredentials; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; +import io.datahubproject.metadata.services.SecretService; import java.time.Instant; import java.util.concurrent.TimeUnit; import org.testng.annotations.BeforeMethod; diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java index c37dc70ef0649..ddd2a13736992 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/authorization/DataHubAuthorizerTest.java @@ -50,6 +50,7 @@ import com.linkedin.policy.DataHubResourceFilter; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.ServicesRegistryContext; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; @@ -269,8 +270,9 @@ public void setupTest() throws Exception { systemOpContext = OperationContext.asSystem( OperationContextConfig.builder().build(), - mock(EntityRegistry.class), systemAuthentication, + mock(EntityRegistry.class), + mock(ServicesRegistryContext.class), mock(IndexConvention.class)); _dataHubAuthorizer = diff --git a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java index 8baeb7d3f8443..0220b5fc3ce22 100644 --- a/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java +++ b/metadata-service/auth-impl/src/test/java/com/datahub/telemetry/TrackingServiceTest.java @@ -11,11 +11,11 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.version.GitVersion; import com.linkedin.telemetry.TelemetryClientId; import com.mixpanel.mixpanelapi.MessageBuilder; import com.mixpanel.mixpanelapi.MixpanelAPI; +import io.datahubproject.metadata.services.SecretService; import java.io.IOException; import java.util.Optional; import org.json.JSONException; diff --git a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java index fc283b7e986bb..10f159fd91b33 100644 --- a/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java +++ b/metadata-service/auth-servlet-impl/src/main/java/com/datahub/auth/authentication/AuthServiceController.java @@ -18,10 +18,10 @@ import com.linkedin.common.urn.Urn; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; import 
com.linkedin.settings.global.GlobalSettingsInfo; import com.linkedin.settings.global.OidcSettings; import com.linkedin.settings.global.SsoSettings; +import io.datahubproject.metadata.services.SecretService; import jakarta.inject.Inject; import java.util.Objects; import java.util.concurrent.CompletableFuture; diff --git a/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java b/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java index 428f14e67d137..07f1bba5547da 100644 --- a/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java +++ b/metadata-service/auth-servlet-impl/src/test/java/com/datahub/auth/authentication/AuthServiceTestConfiguration.java @@ -7,7 +7,7 @@ import com.datahub.telemetry.TrackingService; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; +import io.datahubproject.metadata.services.SecretService; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.boot.test.mock.mockito.MockBean; diff --git a/metadata-service/configuration/src/main/resources/application.yml b/metadata-service/configuration/src/main/resources/application.yml index 8eaf1ed294c6a..d1ed955824729 100644 --- a/metadata-service/configuration/src/main/resources/application.yml +++ b/metadata-service/configuration/src/main/resources/application.yml @@ -45,11 +45,11 @@ authorization: cachePolicyFetchSize: ${POLICY_CACHE_FETCH_SIZE:1000} # Enables authorization of reads, writes, and deletes on REST APIs. Defaults to false for backwards compatibility, but should become true down the road restApiAuthorization: ${REST_API_AUTHORIZATION_ENABLED:false} - search: - enabled: ${SEARCH_AUTHORIZATION_ENABLED:false} + view: + enabled: ${VIEW_AUTHORIZATION_ENABLED:false} recommendations: # Currently limited to the actor only, see TODO: DataHubAuthorizer - peerGroupEnabled: ${SEARCH_AUTHORIZATION_RECOMMENDATIONS_PEER_GROUP_ENABLED:true} + peerGroupEnabled: ${VIEW_AUTHORIZATION_RECOMMENDATIONS_PEER_GROUP_ENABLED:true} ingestion: # The value of cliMajorVersion is substituted in by the processResources Gradle task. 
diff --git a/metadata-service/factories/build.gradle b/metadata-service/factories/build.gradle index b250435b4a642..8bce521a177d3 100644 --- a/metadata-service/factories/build.gradle +++ b/metadata-service/factories/build.gradle @@ -9,6 +9,7 @@ dependencies { api project(':metadata-service:auth-config') api project(':metadata-service:plugin') api project(':metadata-service:configuration') + implementation project(':metadata-service:restli-client') implementation project(':datahub-graphql-core') implementation project(':metadata-service:restli-servlet-impl') implementation project(':metadata-dao-impl:kafka-producer') diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java index 7a2b14fdb0f28..5e486f0bdb86c 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/InviteTokenServiceFactory.java @@ -2,8 +2,8 @@ import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import io.datahubproject.metadata.services.SecretService; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java index 0ed8f1a4b7af4..92b0667f531a6 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/NativeUserServiceFactory.java @@ -4,8 +4,8 @@ import com.linkedin.entity.client.SystemEntityClient; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import io.datahubproject.metadata.services.SecretService; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java index 0dfdee5fcbbbc..9b56b56485dbf 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/SystemOperationContextFactory.java @@ -6,6 +6,8 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import io.datahubproject.metadata.context.OperationContext; import io.datahubproject.metadata.context.OperationContextConfig; +import io.datahubproject.metadata.context.ServicesRegistryContext; +import io.datahubproject.metadata.services.RestrictedService; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ 
-24,12 +26,14 @@ public class SystemOperationContextFactory { protected OperationContext systemOperationContext( @Nonnull final EntityRegistry entityRegistry, @Nonnull @Qualifier("systemAuthentication") final Authentication systemAuthentication, - @Nonnull final OperationContextConfig operationContextConfig) { + @Nonnull final OperationContextConfig operationContextConfig, + @Nonnull final RestrictedService restrictedService) { return OperationContext.asSystem( operationContextConfig, - entityRegistry, systemAuthentication, + entityRegistry, + ServicesRegistryContext.builder().restrictedService(restrictedService).build(), components.getIndexConvention()); } @@ -38,7 +42,7 @@ protected OperationContext systemOperationContext( protected OperationContextConfig operationContextConfig( final ConfigurationProvider configurationProvider) { return OperationContextConfig.builder() - .searchAuthorizationConfiguration(configurationProvider.getAuthorization().getSearch()) + .viewAuthorizationConfiguration(configurationProvider.getAuthorization().getView()) .build(); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RestrictedServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/services/RestrictedServiceFactory.java similarity index 75% rename from metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RestrictedServiceFactory.java rename to metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/services/RestrictedServiceFactory.java index de161023faed9..1fbcb7ff5aef3 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/auth/RestrictedServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/services/RestrictedServiceFactory.java @@ -1,8 +1,8 @@ -package com.linkedin.gms.factory.auth; +package com.linkedin.gms.factory.context.services; -import com.linkedin.metadata.secret.SecretService; -import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; @@ -17,12 +17,12 @@ public class RestrictedServiceFactory { @Autowired @Qualifier("dataHubSecretService") - private SecretService _secretService; + private SecretService secretService; @Bean(name = "restrictedService") @Scope("singleton") @Nonnull protected RestrictedService getInstance() throws Exception { - return new RestrictedService(_secretService); + return new RestrictedService(secretService); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/services/SecretServiceFactory.java similarity index 83% rename from metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java rename to metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/services/SecretServiceFactory.java index 64093c54d0410..60ab3fb5d9381 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/secret/SecretServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/context/services/SecretServiceFactory.java @@ -1,6 +1,6 @@ 
-package com.linkedin.gms.factory.secret; +package com.linkedin.gms.factory.context.services; -import com.linkedin.metadata.secret.SecretService; +import io.datahubproject.metadata.services.SecretService; import javax.annotation.Nonnull; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java index e1055835616ea..093cb74012849 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/entity/RollbackServiceFactory.java @@ -5,23 +5,18 @@ import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import javax.annotation.Nonnull; -import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class RollbackServiceFactory { - @Value("${authorization.restApiAuthorization:false}") - boolean restApiAuthorizationEnabled; - @Bean @Nonnull protected RollbackService rollbackService( final EntityService entityService, final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService) { - return new RollbackService( - entityService, systemMetadataService, timeseriesAspectService, restApiAuthorizationEnabled); + return new RollbackService(entityService, systemMetadataService, timeseriesAspectService); } } diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java index 5a4d22428c29b..d6d5e85cc3f38 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/graphql/GraphQLEngineFactory.java @@ -26,21 +26,21 @@ import com.linkedin.metadata.graph.SiblingGraphService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.recommendation.RecommendationsService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.service.DataProductService; import com.linkedin.metadata.service.ERModelRelationshipService; import com.linkedin.metadata.service.FormService; import com.linkedin.metadata.service.LineageService; import com.linkedin.metadata.service.OwnershipTypeService; import com.linkedin.metadata.service.QueryService; -import com.linkedin.metadata.service.RestrictedService; import com.linkedin.metadata.service.SettingsService; import com.linkedin.metadata.service.ViewService; import com.linkedin.metadata.timeline.TimelineService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import com.linkedin.metadata.version.GitVersion; -import com.linkedin.usage.UsageClient; +import com.linkedin.usage.RestliUsageClient; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import javax.annotation.Nonnull; import org.opensearch.client.RestHighLevelClient; import 
org.springframework.beans.factory.annotation.Autowired; @@ -75,7 +75,7 @@ public class GraphQLEngineFactory { @Autowired @Qualifier("usageClient") - private UsageClient usageClient; + private RestliUsageClient usageClient; @Autowired @Qualifier("entityService") diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java index cb0ef29b50a89..1d844e5c5dab7 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/telemetry/TrackingServiceFactory.java @@ -2,11 +2,11 @@ import com.datahub.telemetry.TrackingService; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.secret.SecretService; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import com.linkedin.metadata.version.GitVersion; import com.mixpanel.mixpanelapi.MessageBuilder; import com.mixpanel.mixpanelapi.MixpanelAPI; +import io.datahubproject.metadata.services.SecretService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; diff --git a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java index d0fe095ddfd91..560d183793085 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java +++ b/metadata-service/factories/src/main/java/com/linkedin/gms/factory/usage/UsageClientFactory.java @@ -6,7 +6,7 @@ import com.linkedin.parseq.retry.backoff.ExponentialBackoff; import com.linkedin.r2.transport.http.client.HttpClientFactory; import com.linkedin.restli.client.Client; -import com.linkedin.usage.UsageClient; +import com.linkedin.usage.RestliUsageClient; import io.datahubproject.metadata.context.OperationContext; import java.util.HashMap; import java.util.Map; @@ -47,7 +47,7 @@ public class UsageClientFactory { private ConfigurationProvider configurationProvider; @Bean("usageClient") - public UsageClient getUsageClient( + public RestliUsageClient getUsageClient( @Qualifier("systemOperationContext") final OperationContext systemOperationContext) { Map params = new HashMap<>(); params.put(HttpClientFactory.HTTP_REQUEST_TIMEOUT, String.valueOf(timeoutMs)); @@ -55,7 +55,7 @@ public UsageClient getUsageClient( Client restClient = DefaultRestliClientFactory.getRestLiClient( gmsHost, gmsPort, gmsUseSSL, gmsSslProtocol, params); - return new UsageClient( + return new RestliUsageClient( systemOperationContext, restClient, new ExponentialBackoff(retryInterval), diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java index 3a6b704613a56..1baafdcb17877 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/IngestPoliciesStep.java @@ -34,6 +34,7 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; @@ -160,11 +161,13 @@ private void insertPolicyDocument(EntityResponse entityResponse, AspectSpec aspe Optional searchDocument; try { searchDocument = - _searchDocumentTransformer.transformAspect( - entityResponse.getUrn(), - new DataHubPolicyInfo(aspect.getValue().data()), - aspectSpec, - false); + _searchDocumentTransformer + .transformAspect( + entityResponse.getUrn(), + new DataHubPolicyInfo(aspect.getValue().data()), + aspectSpec, + false) + .map(Objects::toString); } catch (Exception e) { log.error( "Error in getting documents from aspect: {} for aspect {}", e, aspectSpec.getName()); diff --git a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java index 7e232f939dc08..b7f0871276ac2 100644 --- a/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java +++ b/metadata-service/factories/src/main/java/com/linkedin/metadata/boot/steps/RemoveClientIdAspectStep.java @@ -37,9 +37,8 @@ public void execute() throws Exception { BootstrapStep.setUpgradeResult(REMOVE_UNKNOWN_ASPECTS_URN, _entityService); } catch (Exception e) { - log.error("Error when running the RemoveUnknownAspects Bootstrap Step", e); + log.warn("Error when running the RemoveUnknownAspects Bootstrap Step"); _entityService.deleteUrn(REMOVE_UNKNOWN_ASPECTS_URN); - throw new RuntimeException("Error when running the RemoveUnknownAspects Bootstrap Step", e); } } diff --git a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java index a8e6b50089602..e3d85221554ff 100644 --- a/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java +++ b/metadata-service/factories/src/test/java/com/linkedin/gms/factory/secret/SecretServiceFactoryTest.java @@ -4,7 +4,8 @@ import static org.testng.Assert.assertNotNull; import com.linkedin.gms.factory.config.ConfigurationProvider; -import com.linkedin.metadata.secret.SecretService; +import com.linkedin.gms.factory.context.services.SecretServiceFactory; +import io.datahubproject.metadata.services.SecretService; import java.io.IOException; import java.nio.charset.StandardCharsets; import org.springframework.beans.factory.annotation.Autowired; diff --git a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java index 184379d44a7ad..4900ebee25f0d 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/GraphQLController.java @@ -102,8 +102,15 @@ CompletableFuture> postGraphQL(HttpEntity httpEnt * Init QueryContext */ Authentication authentication = AuthenticationContext.getAuthentication(); + SpringQueryContext context = - new SpringQueryContext(true, authentication, _authorizerChain, systemOperationContext); + new SpringQueryContext( + true, + authentication, + _authorizerChain, + systemOperationContext, + queryJson.asText(), + variables); Span.current().setAttribute("actor.urn", context.getActorUrn()); return CompletableFuture.supplyAsync( diff --git 
a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java index 20e06945e1d6b..b815563818e78 100644 --- a/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java +++ b/metadata-service/graphql-servlet-impl/src/main/java/com/datahub/graphql/SpringQueryContext.java @@ -3,7 +3,11 @@ import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.datahub.graphql.QueryContext; +import graphql.language.OperationDefinition; +import graphql.parser.Parser; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.RequestContext; +import java.util.Map; import javax.annotation.Nonnull; import lombok.Getter; @@ -19,11 +23,29 @@ public SpringQueryContext( final boolean isAuthenticated, final Authentication authentication, final Authorizer authorizer, - @Nonnull final OperationContext systemOperationContext) { + @Nonnull final OperationContext systemOperationContext, + String jsonQuery, + Map variables) { this.isAuthenticated = isAuthenticated; this.authentication = authentication; this.authorizer = authorizer; + + String queryName = + new Parser() + .parseDocument(jsonQuery).getDefinitions().stream() + .filter(def -> def instanceof OperationDefinition) + .map(def -> (OperationDefinition) def) + .filter(opDef -> opDef.getOperation().equals(OperationDefinition.Operation.QUERY)) + .findFirst() + .map(OperationDefinition::getName) + .orElse("graphql"); + this.operationContext = - OperationContext.asSession(systemOperationContext, authorizer, authentication, true); + OperationContext.asSession( + systemOperationContext, + RequestContext.builder().buildGraphql(queryName, variables), + authorizer, + authentication, + true); } } diff --git a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java index 0cedfc22ded6b..9a6b3987b54eb 100644 --- a/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java +++ b/metadata-service/openapi-analytics-servlet/src/main/java/io/datahubproject/openapi/delegates/DatahubUsageEventsImpl.java @@ -1,20 +1,17 @@ package io.datahubproject.openapi.delegates; +import static com.linkedin.metadata.authorization.ApiGroup.ANALYTICS; +import static com.linkedin.metadata.authorization.ApiOperation.READ; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.search.elasticsearch.ElasticSearchService; import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.generated.controller.DatahubUsageEventsApiDelegate; import java.util.Objects; -import java.util.Optional; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; import 
org.springframework.http.ResponseEntity; public class DatahubUsageEventsImpl implements DatahubUsageEventsApiDelegate { @@ -22,9 +19,6 @@ public class DatahubUsageEventsImpl implements DatahubUsageEventsApiDelegate { @Autowired private ElasticSearchService _searchService; @Autowired private AuthorizerChain _authorizationChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean _restApiAuthorizationEnabled; - public static final String DATAHUB_USAGE_INDEX = "datahub_usage_event"; @Override @@ -35,16 +29,9 @@ public ResponseEntity raw(String body) { } private void checkAnalyticsAuthorized(Authentication authentication) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ANALYTICS_PRIVILEGE.getType())))); - - if (_restApiAuthorizationEnabled - && !AuthUtil.isAuthorized(_authorizationChain, actorUrnStr, Optional.empty(), orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get analytics."); + if (!AuthUtil.isAPIAuthorized(authentication, _authorizationChain, ANALYTICS, READ)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to get analytics."); } } } diff --git a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java index 20fdb0db0bd09..f366cef4d979f 100644 --- a/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java +++ b/metadata-service/openapi-entity-servlet/src/main/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImpl.java @@ -1,17 +1,14 @@ package io.datahubproject.openapi.v2.delegates; +import static com.linkedin.metadata.authorization.ApiOperation.EXISTS; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static io.datahubproject.openapi.util.ReflectionCache.toLowerFirst; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.SearchFlags; @@ -20,6 +17,7 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchService; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.RequestContext; import io.datahubproject.openapi.dto.UpsertAspectRequest; import io.datahubproject.openapi.dto.UrnResponseMap; import io.datahubproject.openapi.entities.EntitiesController; @@ -65,6 +63,7 @@ import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nullable; import javax.validation.Valid; import javax.validation.constraints.Min; import org.springframework.http.HttpEntity; @@ -78,8 +77,6 @@ public class 
EntityApiDelegateImpl { private final SearchService _searchService; private final EntitiesController _v1Controller; private final AuthorizerChain _authorizationChain; - - private final boolean _restApiAuthorizationEnabled; private final Class _reqClazz; private final Class _respClazz; private final Class _scrollRespClazz; @@ -91,7 +88,6 @@ public EntityApiDelegateImpl( EntityService entityService, SearchService searchService, EntitiesController entitiesController, - boolean restApiAuthorizationEnabled, AuthorizerChain authorizationChain, Class reqClazz, Class respClazz, @@ -102,7 +98,6 @@ public EntityApiDelegateImpl( this._entityRegistry = entityService.getEntityRegistry(); this._v1Controller = entitiesController; this._authorizationChain = authorizationChain; - this._restApiAuthorizationEnabled = restApiAuthorizationEnabled; this._reqClazz = reqClazz; this._respClazz = respClazz; this._scrollRespClazz = scrollRespClazz; @@ -122,7 +117,10 @@ public ResponseEntity get(String urn, Boolean systemMetadata, List as systemMetadata)); } - public ResponseEntity> create(List body) { + public ResponseEntity> create( + List body, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { List aspects = body.stream() .flatMap( @@ -130,7 +128,7 @@ public ResponseEntity> create(List body) { OpenApiEntitiesUtil.convertEntityToUpsert(b, _reqClazz, _entityRegistry) .stream()) .collect(Collectors.toList()); - _v1Controller.postEntities(aspects, false); + _v1Controller.postEntities(aspects, false, createIfNotExists, createEntityIfNotExists); List responses = body.stream() .map(req -> OpenApiEntitiesUtil.convertToResponse(req, _respClazz, _entityRegistry)) @@ -146,6 +144,14 @@ public ResponseEntity delete(String urn) { public ResponseEntity head(String urn) { try { Urn entityUrn = Urn.createFromString(urn); + + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + auth, _authorizationChain, EXISTS, List.of(entityUrn))) { + throw new UnauthorizedException( + auth.getActor().toUrnStr() + " is unauthorized to check existence of entities."); + } + if (_entityService.exists(entityUrn, true)) { return new ResponseEntity<>(HttpStatus.NO_CONTENT); } else { @@ -171,11 +177,20 @@ public ResponseEntity getAspect( } public ResponseEntity createAspect( - String urn, String aspectName, AQ body, Class reqClazz, Class respClazz) { + String urn, + String aspectName, + AQ body, + Class reqClazz, + Class respClazz, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { UpsertAspectRequest aspectUpsert = OpenApiEntitiesUtil.convertAspectToUpsert(urn, body, reqClazz); _v1Controller.postEntities( - Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList()), false); + Stream.of(aspectUpsert).filter(Objects::nonNull).collect(Collectors.toList()), + false, + createIfNotExists, + createEntityIfNotExists); AR response = OpenApiEntitiesUtil.convertToResponseAspect(body, respClazz); return ResponseEntity.ok(response); } @@ -183,6 +198,14 @@ public ResponseEntity createAspect( public ResponseEntity headAspect(String urn, String aspect) { try { Urn entityUrn = Urn.createFromString(urn); + + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + auth, _authorizationChain, EXISTS, List.of(entityUrn))) { + throw new UnauthorizedException( + auth.getActor().toUrnStr() + " is unauthorized to check existence of entities."); + } + if 
(_entityService.exists(entityUrn, aspect, true)) { return new ResponseEntity<>(HttpStatus.NO_CONTENT); } else { @@ -200,7 +223,10 @@ public ResponseEntity deleteAspect(String urn, String aspect) { } public ResponseEntity createDomains( - DomainsAspectRequestV2 body, String urn) { + DomainsAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -208,11 +234,16 @@ public ResponseEntity createDomains( methodNameToAspectName(methodName), body, DomainsAspectRequestV2.class, - DomainsAspectResponseV2.class); + DomainsAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createGlobalTags( - GlobalTagsAspectRequestV2 body, String urn) { + GlobalTagsAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -220,11 +251,16 @@ public ResponseEntity createGlobalTags( methodNameToAspectName(methodName), body, GlobalTagsAspectRequestV2.class, - GlobalTagsAspectResponseV2.class); + GlobalTagsAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createGlossaryTerms( - GlossaryTermsAspectRequestV2 body, String urn) { + GlossaryTermsAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -232,11 +268,16 @@ public ResponseEntity createGlossaryTerms( methodNameToAspectName(methodName), body, GlossaryTermsAspectRequestV2.class, - GlossaryTermsAspectResponseV2.class); + GlossaryTermsAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createOwnership( - OwnershipAspectRequestV2 body, String urn) { + OwnershipAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -244,11 +285,16 @@ public ResponseEntity createOwnership( methodNameToAspectName(methodName), body, OwnershipAspectRequestV2.class, - OwnershipAspectResponseV2.class); + OwnershipAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createStatus( - StatusAspectRequestV2 body, String urn) { + StatusAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -256,7 +302,9 @@ public ResponseEntity createStatus( methodNameToAspectName(methodName), body, StatusAspectRequestV2.class, - StatusAspectResponseV2.class); + StatusAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity deleteDomains(String urn) { @@ -412,7 +460,10 @@ public ResponseEntity headDeprecation(String urn) { } public ResponseEntity createDeprecation( - @Valid DeprecationAspectRequestV2 body, String urn) { + @Valid DeprecationAspectRequestV2 body, + String urn, + @Nullable Boolean 
createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -420,7 +471,9 @@ public ResponseEntity createDeprecation( methodNameToAspectName(methodName), body, DeprecationAspectRequestV2.class, - DeprecationAspectResponseV2.class); + DeprecationAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity headBrowsePathsV2(String urn) { @@ -442,7 +495,10 @@ public ResponseEntity getBrowsePathsV2( } public ResponseEntity createBrowsePathsV2( - @Valid BrowsePathsV2AspectRequestV2 body, String urn) { + @Valid BrowsePathsV2AspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -450,7 +506,9 @@ public ResponseEntity createBrowsePathsV2( methodNameToAspectName(methodName), body, BrowsePathsV2AspectRequestV2.class, - BrowsePathsV2AspectResponseV2.class); + BrowsePathsV2AspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity scroll( @@ -464,14 +522,24 @@ public ResponseEntity scroll( SearchFlags searchFlags = new SearchFlags().setFulltext(false).setSkipAggregates(true).setSkipHighlighting(true); + com.linkedin.metadata.models.EntitySpec entitySpec = + OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); Authentication authentication = AuthenticationContext.getAuthentication(); + + if (!AuthUtil.isAPIAuthorizedEntityType( + authentication, _authorizationChain, READ, entitySpec.getName())) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to search entities."); + } + OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizationChain, authentication, true); - com.linkedin.metadata.models.EntitySpec entitySpec = - OpenApiEntitiesUtil.responseClassToEntitySpec(_entityRegistry, _respClazz); - checkScrollAuthorized(authentication, entitySpec); + systemOperationContext, + RequestContext.builder().buildOpenapi("scroll", entitySpec.getName()), + _authorizationChain, + authentication, + true); // TODO multi-field sort SortCriterion sortCriterion = new SortCriterion(); @@ -491,6 +559,11 @@ public ResponseEntity scroll( null, count); + if (!AuthUtil.isAPIAuthorizedResult(authentication, _authorizationChain, result)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); + } + String[] urns = result.getEntities().stream() .map(SearchEntity::getEntity) @@ -512,26 +585,11 @@ public ResponseEntity scroll( _scrollRespClazz, result.getScrollId(), entities)); } - private void checkScrollAuthorized( - Authentication authentication, com.linkedin.metadata.models.EntitySpec entitySpec) { - String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); - - List> resourceSpecs = - List.of(Optional.of(new EntitySpec(entitySpec.getName(), ""))); - if (_restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - _authorizationChain, actorUrnStr, resourceSpecs, orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to get 
entities."); - } - } - public ResponseEntity createDatasetProperties( - @Valid DatasetPropertiesAspectRequestV2 body, String urn) { + @Valid DatasetPropertiesAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -539,11 +597,16 @@ public ResponseEntity createDatasetProperties methodNameToAspectName(methodName), body, DatasetPropertiesAspectRequestV2.class, - DatasetPropertiesAspectResponseV2.class); + DatasetPropertiesAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createEditableDatasetProperties( - @Valid EditableDatasetPropertiesAspectRequestV2 body, String urn) { + @Valid EditableDatasetPropertiesAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -551,11 +614,16 @@ public ResponseEntity createEditableD methodNameToAspectName(methodName), body, EditableDatasetPropertiesAspectRequestV2.class, - EditableDatasetPropertiesAspectResponseV2.class); + EditableDatasetPropertiesAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createInstitutionalMemory( - @Valid InstitutionalMemoryAspectRequestV2 body, String urn) { + @Valid InstitutionalMemoryAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -563,11 +631,16 @@ public ResponseEntity createInstitutionalMe methodNameToAspectName(methodName), body, InstitutionalMemoryAspectRequestV2.class, - InstitutionalMemoryAspectResponseV2.class); + InstitutionalMemoryAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createChartInfo( - @Valid ChartInfoAspectRequestV2 body, String urn) { + @Valid ChartInfoAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -575,11 +648,16 @@ public ResponseEntity createChartInfo( methodNameToAspectName(methodName), body, ChartInfoAspectRequestV2.class, - ChartInfoAspectResponseV2.class); + ChartInfoAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createEditableChartProperties( - @Valid EditableChartPropertiesAspectRequestV2 body, String urn) { + @Valid EditableChartPropertiesAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -587,11 +665,16 @@ public ResponseEntity createEditableCha methodNameToAspectName(methodName), body, EditableChartPropertiesAspectRequestV2.class, - EditableChartPropertiesAspectResponseV2.class); + EditableChartPropertiesAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createDataProductProperties( - @Valid DataProductPropertiesAspectRequestV2 body, String urn) { 
+ @Valid DataProductPropertiesAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -599,7 +682,9 @@ public ResponseEntity createDataProductPr methodNameToAspectName(methodName), body, DataProductPropertiesAspectRequestV2.class, - DataProductPropertiesAspectResponseV2.class); + DataProductPropertiesAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity deleteDatasetProperties(String urn) { @@ -746,7 +831,11 @@ public ResponseEntity deleteDataProductProperties(String urn) { return deleteAspect(urn, methodNameToAspectName(methodName)); } - public ResponseEntity createForms(FormsAspectRequestV2 body, String urn) { + public ResponseEntity createForms( + FormsAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -754,7 +843,9 @@ public ResponseEntity createForms(FormsAspectRequestV2 bo methodNameToAspectName(methodName), body, FormsAspectRequestV2.class, - FormsAspectResponseV2.class); + FormsAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity deleteForms(String urn) { @@ -782,7 +873,10 @@ public ResponseEntity headForms(String urn) { } public ResponseEntity createDynamicFormAssignment( - DynamicFormAssignmentAspectRequestV2 body, String urn) { + DynamicFormAssignmentAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -790,11 +884,16 @@ public ResponseEntity createDynamicFormAs methodNameToAspectName(methodName), body, DynamicFormAssignmentAspectRequestV2.class, - DynamicFormAssignmentAspectResponseV2.class); + DynamicFormAssignmentAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity createFormInfo( - FormInfoAspectRequestV2 body, String urn) { + FormInfoAspectRequestV2 body, + String urn, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { String methodName = walker.walk(frames -> frames.findFirst().map(StackWalker.StackFrame::getMethodName)).get(); return createAspect( @@ -802,7 +901,9 @@ public ResponseEntity createFormInfo( methodNameToAspectName(methodName), body, FormInfoAspectRequestV2.class, - FormInfoAspectResponseV2.class); + FormInfoAspectResponseV2.class, + createIfNotExists, + createEntityIfNotExists); } public ResponseEntity deleteDynamicFormAssignment(String urn) { diff --git a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache index f9717b8cb16fb..f7d7508c414e0 100644 --- a/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache +++ b/metadata-service/openapi-entity-servlet/src/main/resources/JavaSpring/apiController.mustache @@ -95,12 +95,11 @@ public class {{classname}}Controller implements {{classname}} { public {{classname}}Controller(ObjectMapper objectMapper, HttpServletRequest request, 
@org.springframework.beans.factory.annotation.Qualifier("systemOperationContext") OperationContext systemOperationContext, EntityService entityService, - SearchService searchService, EntitiesController v1Controller, AuthorizerChain authorizationChain, - @Value("${authorization.restApiAuthorization:false}") boolean restApiAuthorizationEnabled) { + SearchService searchService, EntitiesController v1Controller, AuthorizerChain authorizationChain) { this.objectMapper = objectMapper; this.request = request; this.delegate = new EntityApiDelegateImpl<{{requestClass}}, {{responseClass}}, {{scrollResponseClass}}>(systemOperationContext, entityService, searchService, v1Controller, - restApiAuthorizationEnabled, authorizationChain, {{requestClass}}.class, {{responseClass}}.class, {{scrollResponseClass}}.class); + authorizationChain, {{requestClass}}.class, {{responseClass}}.class, {{scrollResponseClass}}.class); } {{#isJava8or11}} diff --git a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java index d4217c9fd1b66..2d31e35c487aa 100644 --- a/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java +++ b/metadata-service/openapi-entity-servlet/src/test/java/io/datahubproject/openapi/v2/delegates/EntityApiDelegateImplTest.java @@ -85,7 +85,8 @@ public void chartApiControllerTest() { final String testUrn = "urn:li:chart:(looker,baz1)"; ChartEntityRequestV2 req = ChartEntityRequestV2.builder().urn(testUrn).build(); - ChartEntityResponseV2 resp = chartApiController.create(List.of(req)).getBody().get(0); + ChartEntityResponseV2 resp = + chartApiController.create(List.of(req), false, false).getBody().get(0); assertEquals(resp.getUrn(), testUrn); resp = chartApiController.get(testUrn, false, List.of()).getBody(); @@ -108,7 +109,8 @@ public void datasetApiControllerTest() { final String testUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,SampleKafkaDataset,PROD)"; DatasetEntityRequestV2 req = DatasetEntityRequestV2.builder().urn(testUrn).build(); - DatasetEntityResponseV2 resp = datasetApiController.create(List.of(req)).getBody().get(0); + DatasetEntityResponseV2 resp = + datasetApiController.create(List.of(req), false, false).getBody().get(0); assertEquals(resp.getUrn(), testUrn); resp = datasetApiController.get(testUrn, false, List.of()).getBody(); @@ -138,7 +140,8 @@ public void browsePathsTest() { .build()) .build(); assertEquals( - datasetApiController.createBrowsePathsV2(testUrn, req).getStatusCode(), HttpStatus.OK); + datasetApiController.createBrowsePathsV2(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteBrowsePathsV2(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getBrowsePathsV2(testUrn, false).getStatusCode(), @@ -156,7 +159,8 @@ public void deprecationTest() { .value(Deprecation.builder().deprecated(true).build()) .build(); assertEquals( - datasetApiController.createDeprecation(testUrn, req).getStatusCode(), HttpStatus.OK); + datasetApiController.createDeprecation(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteDeprecation(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getDeprecation(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); @@ -172,7 +176,9 @@ public void 
domainsTest() { DomainsAspectRequestV2.builder() .value(Domains.builder().domains(List.of("my_domain")).build()) .build(); - assertEquals(datasetApiController.createDomains(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.createDomains(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteDomains(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getDomains(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); @@ -192,7 +198,9 @@ public void ownershipTest() { Owner.builder().owner("me").type(OwnershipType.BUSINESS_OWNER).build())) .build()) .build(); - assertEquals(datasetApiController.createOwnership(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.createOwnership(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteOwnership(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getOwnership(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); @@ -205,7 +213,9 @@ public void statusTest() { StatusAspectRequestV2 req = StatusAspectRequestV2.builder().value(Status.builder().removed(true).build()).build(); - assertEquals(datasetApiController.createStatus(testUrn, req).getStatusCode(), HttpStatus.OK); + assertEquals( + datasetApiController.createStatus(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteStatus(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getStatus(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); @@ -224,7 +234,8 @@ public void globalTagsTest() { .build()) .build(); assertEquals( - datasetApiController.createGlobalTags(testUrn, req).getStatusCode(), HttpStatus.OK); + datasetApiController.createGlobalTags(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteGlobalTags(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getGlobalTags(testUrn, false).getStatusCode(), HttpStatus.NOT_FOUND); @@ -244,7 +255,8 @@ public void glossaryTermsTest() { .build()) .build(); assertEquals( - datasetApiController.createGlossaryTerms(testUrn, req).getStatusCode(), HttpStatus.OK); + datasetApiController.createGlossaryTerms(testUrn, req, false, false).getStatusCode(), + HttpStatus.OK); assertEquals(datasetApiController.deleteGlossaryTerms(testUrn).getStatusCode(), HttpStatus.OK); assertEquals( datasetApiController.getGlossaryTerms(testUrn, false).getStatusCode(), diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java index 63e78c30383af..e5796795c6f7a 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/entities/EntitiesController.java @@ -1,5 +1,9 @@ package io.datahubproject.openapi.entities; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; import com.codahale.metrics.MetricRegistry; @@ -8,17 +12,13 @@ 
import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.metadata.utils.metrics.MetricUtils; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.util.Pair; import io.datahubproject.openapi.dto.RollbackRunResultDto; import io.datahubproject.openapi.dto.UpsertAspectRequest; @@ -34,14 +34,12 @@ import java.util.HashSet; import java.util.List; import java.util.Objects; -import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.propertyeditors.StringArrayPropertyEditor; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; @@ -69,9 +67,6 @@ public class EntitiesController { private final ObjectMapper _objectMapper; private final AuthorizerChain _authorizerChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean restApiAuthorizationEnabled; - @InitBinder public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); @@ -102,21 +97,11 @@ public ResponseEntity getEntities( log.debug("GET ENTITIES {}", entityUrns); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())))); - List> resourceSpecs = - entityUrns.stream() - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + if (!AuthUtil.isAPIAuthorizedEntityUrns(authentication, _authorizerChain, READ, entityUrns)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get entities."); } + if (entityUrns.size() <= 0) { return ResponseEntity.ok(UrnResponseMap.builder().responses(Collections.emptyMap()).build()); } @@ -155,37 +140,54 @@ public ResponseEntity getEntities( @PostMapping(value = "/", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity> postEntities( @RequestBody @Nonnull List aspectRequests, - @RequestParam(required = false, name = "async") Boolean async) { + @RequestParam(required = false, name = "async") Boolean async, + @RequestParam(required = false, name = "createIfNotExists") Boolean createIfNotExists, + @RequestParam(required = false, name = "createEntityIfNotExists") + Boolean createEntityIfNotExists) { + log.info("INGEST PROPOSAL proposal: {}", aspectRequests); Authentication authentication = 
AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); + List proposals = aspectRequests.stream() - .map(MappingUtil::mapToProposal) + .map(req -> MappingUtil.mapToProposal(req, createIfNotExists, createEntityIfNotExists)) .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) .collect(Collectors.toList()); - if (restApiAuthorizationEnabled - && !MappingUtil.authorizeProposals( - proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); + /* + Ingest Authorization Checks + */ + List> exceptions = + isAPIAuthorized( + authentication, + _authorizerChain, + ENTITY, + _entityService.getEntityRegistry(), + proposals) + .stream() + .filter(p -> p.getSecond() != com.linkedin.restli.common.HttpStatus.S_200_OK.getCode()) + .collect(Collectors.toList()); + if (!exceptions.isEmpty()) { + throw new UnauthorizedException( + actorUrnStr + + " is unauthorized to edit entities. " + + exceptions.stream() + .map( + ex -> + String.format( + "HttpStatus: %s Urn: %s", + ex.getSecond(), ex.getFirst().getEntityUrn())) + .collect(Collectors.toList())); } boolean asyncBool = Objects.requireNonNullElseGet( async, () -> Boolean.parseBoolean(System.getenv("ASYNC_INGEST_DEFAULT"))); List> responses = - proposals.stream() - .map( - proposal -> - MappingUtil.ingestProposal(proposal, actorUrnStr, _entityService, asyncBool)) - .collect(Collectors.toList()); + MappingUtil.ingestBatchProposal(proposals, actorUrnStr, _entityService, asyncBool); + if (responses.stream().anyMatch(Pair::getSecond)) { return ResponseEntity.status(HttpStatus.CREATED) .body( @@ -219,11 +221,7 @@ public ResponseEntity> deleteEntities( try (Timer.Context context = MetricUtils.timer("deleteEntities").time()) { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); + final Set entityUrns = Arrays.stream(urns) // Have to decode here because of frontend routing, does No-op for already unencoded @@ -232,17 +230,9 @@ public ResponseEntity> deleteEntities( .map(UrnUtils::getUrn) .collect(Collectors.toSet()); - List> resourceSpecs = - entityUrns.stream() - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { - UnauthorizedException unauthorizedException = - new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); - exceptionally = unauthorizedException; - throw unauthorizedException; + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, _authorizerChain, DELETE, entityUrns)) { + throw new UnauthorizedException(actorUrnStr + " is unauthorized to delete entities."); } if (!soft) { @@ -267,7 +257,7 @@ public ResponseEntity> deleteEntities( RollbackRunResultDto.builder() .rowsRolledBack( deleteRequests.stream() - .map(MappingUtil::mapToProposal) + .map(req -> MappingUtil.mapToProposal(req, null, null)) .map( 
proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/openlineage/controller/LineageApiImpl.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/openlineage/controller/LineageApiImpl.java index b849ff588bee5..80fb1391bc948 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/openlineage/controller/LineageApiImpl.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/openlineage/controller/LineageApiImpl.java @@ -16,7 +16,6 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.RequestMapping; @@ -38,9 +37,6 @@ public class LineageApiImpl implements LineageApi { @Autowired private AuthorizerChain _authorizerChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean restApiAuthorizationEnabled; - @Override public Optional getObjectMapper() { return Optional.of(OBJECT_MAPPER); diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java index f3617801e6b55..f7c6b4ec071c4 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/operations/elastic/OperationsController.java @@ -3,24 +3,30 @@ import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; +import com.linkedin.metadata.entity.restoreindices.RestoreIndicesResult; import com.linkedin.metadata.query.SearchFlags; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.filter.SortCriterion; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.timeseries.TimeseriesIndexSizeResult; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.RequestContext; import io.datahubproject.openapi.util.ElasticsearchUtils; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.Parameter; import io.swagger.v3.oas.annotations.tags.Tag; +import java.net.URISyntaxException; import java.util.List; +import java.util.Optional; +import java.util.Set; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -28,7 +34,6 @@ import org.json.JSONObject; 
import org.opensearch.action.explain.ExplainResponse; import org.opensearch.client.tasks.GetTaskResponse; -import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.propertyeditors.StringArrayPropertyEditor; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; @@ -36,6 +41,8 @@ import org.springframework.web.bind.WebDataBinder; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.InitBinder; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @@ -49,25 +56,23 @@ public class OperationsController { private final Authorizer authorizerChain; private final OperationContext systemOperationContext; - - @Value("${authorization.restApiAuthorization:false}") - private boolean restApiAuthorizationEnabled; - private final SystemMetadataService systemMetadataService; private final TimeseriesAspectService timeseriesAspectService; - private final EntitySearchService searchService; + private final EntityService entityService; public OperationsController( OperationContext systemOperationContext, SystemMetadataService systemMetadataService, TimeseriesAspectService timeseriesAspectService, - EntitySearchService searchService) { + EntitySearchService searchService, + EntityService entityService) { this.systemOperationContext = systemOperationContext; this.authorizerChain = systemOperationContext.getAuthorizerContext().getAuthorizer(); this.systemMetadataService = systemMetadataService; this.timeseriesAspectService = timeseriesAspectService; this.searchService = searchService; + this.entityService = entityService; } @InitBinder @@ -81,14 +86,9 @@ public void initBinder(WebDataBinder binder) { public ResponseEntity getTaskStatus(String task) { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { + + if (!AuthUtil.isAPIAuthorized( + authentication, authorizerChain, PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE)) { return ResponseEntity.status(HttpStatus.FORBIDDEN) .body(String.format(actorUrnStr + " is not authorized to get ElasticSearch task status")); } @@ -122,15 +122,9 @@ public ResponseEntity getTaskStatus(String task) { public ResponseEntity getIndexSizes() { Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of( - PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { + + if (!AuthUtil.isAPIAuthorized( + authentication, authorizerChain, PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE)) { return 
ResponseEntity.status(HttpStatus.FORBIDDEN) .body(String.format(actorUrnStr + " is not authorized to get timeseries index sizes")); } @@ -223,20 +217,18 @@ public ResponseEntity explainSearchQuery( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.ES_EXPLAIN_QUERY_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - authorizerChain, actorUrnStr, List.of(java.util.Optional.empty()), orGroup)) { + + if (!AuthUtil.isAPIAuthorized( + authentication, authorizerChain, PoliciesConfig.ES_EXPLAIN_QUERY_PRIVILEGE)) { log.error("{} is not authorized to get timeseries index sizes", actorUrnStr); return ResponseEntity.status(HttpStatus.FORBIDDEN).body(null); } OperationContext opContext = systemOperationContext - .asSession(authorizerChain, authentication) + .asSession( + RequestContext.builder().buildOpenapi("explainSearchQuery", entityName), + authorizerChain, + authentication) .withSearchFlags(flags -> searchFlags); ExplainResponse response = @@ -254,4 +246,59 @@ public ResponseEntity explainSearchQuery( return ResponseEntity.ok(response); } + + @Tag(name = "RestoreIndices") + @GetMapping(path = "/restoreIndices", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Restore ElasticSearch indices from primary storage based on URNs.") + public ResponseEntity restoreIndices( + @RequestParam(required = false, name = "aspectName") @Nullable String aspectName, + @RequestParam(required = false, name = "urn") @Nullable String urn, + @RequestParam(required = false, name = "urnLike") @Nullable String urnLike, + @RequestParam(required = false, name = "batchSize", defaultValue = "100") @Nullable + Integer batchSize, + @RequestParam(required = false, name = "start", defaultValue = "0") @Nullable Integer start) { + + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorized( + authentication, authorizerChain, PoliciesConfig.RESTORE_INDICES_PRIVILEGE)) { + return ResponseEntity.status(HttpStatus.FORBIDDEN).build(); + } + + RestoreIndicesArgs args = + new RestoreIndicesArgs() + .setAspectName(aspectName) + .setUrnLike(urnLike) + .setUrn( + Optional.ofNullable(urn) + .map(urnStr -> UrnUtils.getUrn(urnStr).toString()) + .orElse(null)) + .setStart(start) + .setBatchSize(batchSize); + + return ResponseEntity.of(Optional.of(entityService.restoreIndices(args, log::info))); + } + + @Tag(name = "RestoreIndices") + @PostMapping(path = "/restoreIndices", produces = MediaType.APPLICATION_JSON_VALUE) + @Operation(summary = "Restore ElasticSearch indices from primary storage based on URNs.") + public ResponseEntity> restoreIndices( + @RequestParam(required = false, name = "aspectNames") @Nullable Set aspectNames, + @RequestParam(required = false, name = "batchSize", defaultValue = "100") @Nullable + Integer batchSize, + @RequestBody @Nonnull Set urns) + throws RemoteInvocationException, URISyntaxException { + + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorized( + authentication, authorizerChain, PoliciesConfig.RESTORE_INDICES_PRIVILEGE)) { + return ResponseEntity.status(HttpStatus.FORBIDDEN).build(); + } + + return ResponseEntity.of( + Optional.of( + entityService.restoreIndices( + 
urns.stream().map(UrnUtils::getUrn).collect(Collectors.toSet()), + aspectNames, + batchSize))); + } } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java index 7193da3bf8587..1ba68f66a5d5e 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/platform/entities/PlatformEntitiesController.java @@ -1,13 +1,12 @@ package io.datahubproject.openapi.platform.entities; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; + import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthorizerChain; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.metadata.search.client.CachingEntitySearchService; @@ -23,7 +22,6 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.propertyeditors.StringArrayPropertyEditor; import org.springframework.http.HttpStatus; import org.springframework.http.MediaType; @@ -50,9 +48,6 @@ public class PlatformEntitiesController { private final ObjectMapper _objectMapper; private final AuthorizerChain _authorizerChain; - @Value("${authorization.restApiAuthorization:false}") - private Boolean restApiAuthorizationEnabled; - @InitBinder public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); @@ -71,16 +66,31 @@ public ResponseEntity> postEntities( metadataChangeProposals.stream() .map(proposal -> MappingUtil.mapToServiceProposal(proposal, _objectMapper)) .collect(Collectors.toList()); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())))); - if (restApiAuthorizationEnabled - && !MappingUtil.authorizeProposals( - proposals, _entityService, _authorizerChain, actorUrnStr, orGroup)) { - throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); + /* + Ingest Authorization Checks + */ + List> exceptions = + isAPIAuthorized( + authentication, + _authorizerChain, + ENTITY, + _entityService.getEntityRegistry(), + proposals) + .stream() + .filter(p -> p.getSecond() != com.linkedin.restli.common.HttpStatus.S_200_OK.getCode()) + .collect(Collectors.toList()); + if (!exceptions.isEmpty()) { + throw new UnauthorizedException( + actorUrnStr + + " is unauthorized to edit entities. 
" + + exceptions.stream() + .map( + ex -> + String.format( + "HttpStatus: %s Urn: %s", + ex.getSecond(), ex.getFirst().getEntityUrn())) + .collect(Collectors.toList())); } boolean asyncBool = diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java index 4ceed6a11b973..98fc161699c48 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/relationships/RelationshipsController.java @@ -1,5 +1,7 @@ package io.datahubproject.openapi.relationships; +import static com.linkedin.metadata.authorization.ApiGroup.RELATIONSHIP; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static com.linkedin.metadata.search.utils.QueryUtils.*; import com.codahale.metrics.MetricRegistry; @@ -8,13 +10,8 @@ import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.RelatedEntitiesResult; import com.linkedin.metadata.search.utils.QueryUtils; @@ -29,14 +26,11 @@ import java.net.URLDecoder; import java.nio.charset.Charset; import java.util.Arrays; -import java.util.Collections; import java.util.List; -import java.util.Optional; import javax.annotation.Nonnull; import javax.annotation.Nullable; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.propertyeditors.StringArrayPropertyEditor; import org.springframework.http.MediaType; import org.springframework.http.ResponseEntity; @@ -63,9 +57,6 @@ public enum RelationshipDirection { private final GraphService _graphService; private final AuthorizerChain _authorizerChain; - @Value("${authorization.restApiAuthorization:false}") - private boolean restApiAuthorizationEnabled; - @InitBinder public void initBinder(WebDataBinder binder) { binder.registerCustomEditor(String[].class, new StringArrayPropertyEditor(null)); @@ -162,21 +153,9 @@ public ResponseEntity getRelationships( log.debug("GET Relationships {}", entityUrn); Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()) - // Re-using GET_ENTITY_PRIVILEGE here as it doesn't make sense to split the - // privileges between these APIs. 
- ))); - List> resourceSpecs = - Collections.singletonList( - Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString()))); - if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorizedForResources( - _authorizerChain, actorUrnStr, resourceSpecs, orGroup)) { + if (!AuthUtil.isAPIAuthorizedUrns( + authentication, _authorizerChain, RELATIONSHIP, READ, List.of(entityUrn))) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to get relationships."); } diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java index 950b7cf6ae111..4f726b05b9cb8 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/timeline/TimelineController.java @@ -18,7 +18,6 @@ import io.swagger.v3.oas.annotations.tags.Tag; import java.net.URISyntaxException; import java.util.List; -import java.util.Optional; import java.util.Set; import lombok.AllArgsConstructor; import org.springframework.beans.factory.annotation.Value; @@ -76,8 +75,7 @@ public ResponseEntity> getTimeline( new ConjunctivePrivilegeGroup( ImmutableList.of(PoliciesConfig.GET_TIMELINE_PRIVILEGE.getType())))); if (restApiAuthorizationEnabled - && !AuthUtil.isAuthorized( - _authorizerChain, actorUrnStr, Optional.of(resourceSpec), orGroup)) { + && !AuthUtil.isAuthorized(_authorizerChain, actorUrnStr, orGroup, resourceSpec)) { throw new UnauthorizedException(actorUrnStr + " is unauthorized to edit entities."); } return ResponseEntity.ok( diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java index 6b31159a20665..0b3095e057676 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/util/MappingUtil.java @@ -6,10 +6,6 @@ import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.Timer; -import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.datahub.plugins.auth.authorization.Authorizer; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -32,7 +28,6 @@ import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; import com.linkedin.metadata.entity.ebean.batch.ChangeItemImpl; import com.linkedin.metadata.entity.validation.ValidationException; -import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.metadata.utils.metrics.MetricUtils; import com.linkedin.mxe.GenericAspect; import com.linkedin.mxe.SystemMetadata; @@ -417,32 +412,19 @@ public static GenericAspect convertGenericAspect( } } - public static boolean authorizeProposals( - List proposals, - EntityService entityService, - Authorizer authorizer, - String actorUrnStr, - DisjunctivePrivilegeGroup orGroup) { - List> resourceSpecs = - proposals.stream() - .map( - proposal -> { - com.linkedin.metadata.models.EntitySpec entitySpec = - entityService.getEntityRegistry().getEntitySpec(proposal.getEntityType()); - Urn entityUrn = - 
EntityKeyUtils.getUrnFromProposal(proposal, entitySpec.getKeyAspectSpec()); - return Optional.of( - new EntitySpec(proposal.getEntityType(), entityUrn.toString())); - }) - .collect(Collectors.toList()); - return AuthUtil.isAuthorizedForResources(authorizer, actorUrnStr, resourceSpecs, orGroup); - } - public static Pair ingestProposal( com.linkedin.mxe.MetadataChangeProposal serviceProposal, String actorUrn, EntityService entityService, boolean async) { + return ingestBatchProposal(List.of(serviceProposal), actorUrn, entityService, async).get(0); + } + + public static List> ingestBatchProposal( + List serviceProposals, + String actorUrn, + EntityService entityService, + boolean async) { // TODO: Use the actor present in the IC. Timer.Context context = MetricUtils.timer("postEntity").time(); @@ -451,19 +433,24 @@ public static Pair ingestProposal( .setTime(System.currentTimeMillis()) .setActor(UrnUtils.getUrn(actorUrn)); - log.info("Proposal: {}", serviceProposal); + log.info("Proposal: {}", serviceProposals); Throwable exceptionally = null; try { AspectsBatch batch = - AspectsBatchImpl.builder() - .mcps(List.of(serviceProposal), auditStamp, entityService) - .build(); + AspectsBatchImpl.builder().mcps(serviceProposals, auditStamp, entityService).build(); - Set proposalResult = entityService.ingestProposal(batch, async); + Map> resultMap = + entityService.ingestProposal(batch, async).stream() + .collect(Collectors.groupingBy(IngestResult::getUrn)); + + return resultMap.entrySet().stream() + .map( + entry -> + Pair.of( + entry.getKey().toString(), + entry.getValue().stream().anyMatch(IngestResult::isSqlCommitted))) + .collect(Collectors.toList()); - Urn urn = proposalResult.stream().findFirst().get().getUrn(); - return new Pair<>( - urn.toString(), proposalResult.stream().anyMatch(IngestResult::isSqlCommitted)); } catch (ValidationException ve) { exceptionally = ve; throw HttpClientErrorException.create( @@ -481,7 +468,10 @@ public static Pair ingestProposal( } } - public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectRequest) { + public static MetadataChangeProposal mapToProposal( + UpsertAspectRequest aspectRequest, + @Nullable Boolean createIfNotExists, + @Nullable Boolean createEntityIfNotExists) { MetadataChangeProposal.MetadataChangeProposalBuilder metadataChangeProposal = MetadataChangeProposal.builder(); io.datahubproject.openapi.generated.GenericAspect genericAspect = @@ -497,9 +487,19 @@ public static MetadataChangeProposal mapToProposal(UpsertAspectRequest aspectReq .value(aspectRequest.getEntityKeyAspect()) .build(); } + + final io.datahubproject.openapi.generated.ChangeType changeType; + if (Boolean.TRUE.equals(createEntityIfNotExists)) { + changeType = io.datahubproject.openapi.generated.ChangeType.CREATE_ENTITY; + } else if (Boolean.TRUE.equals(createIfNotExists)) { + changeType = io.datahubproject.openapi.generated.ChangeType.CREATE; + } else { + changeType = io.datahubproject.openapi.generated.ChangeType.UPSERT; + } + metadataChangeProposal .aspect(genericAspect) - .changeType(io.datahubproject.openapi.generated.ChangeType.UPSERT) + .changeType(changeType) .aspectName(ASPECT_NAME_MAP.get(aspectRequest.getAspect().getClass())) .entityKeyAspect(keyAspect) .entityUrn(aspectRequest.getEntityUrn()) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java index 
55bb8ebe625ae..48109ab2b1ba4 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/EntityController.java @@ -1,23 +1,27 @@ package io.datahubproject.openapi.v2.controller; -import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; +import static com.linkedin.metadata.authorization.ApiOperation.CREATE; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.EXISTS; +import static com.linkedin.metadata.authorization.ApiOperation.READ; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; import com.datahub.util.RecordUtils; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.ByteString; import com.linkedin.data.template.RecordTemplate; import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.aspect.batch.ChangeMCP; import com.linkedin.metadata.aspect.patch.GenericJsonPatch; import com.linkedin.metadata.aspect.patch.template.common.GenericPatchTemplate; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.UpdateAspectResult; import com.linkedin.metadata.entity.ebean.batch.AspectsBatchImpl; @@ -38,6 +42,8 @@ import com.linkedin.mxe.SystemMetadata; import com.linkedin.util.Pair; import io.datahubproject.metadata.context.OperationContext; +import io.datahubproject.metadata.context.RequestContext; +import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.v2.models.BatchGetUrnRequest; import io.datahubproject.openapi.v2.models.BatchGetUrnResponse; import io.datahubproject.openapi.v2.models.GenericEntity; @@ -85,7 +91,6 @@ public class EntityController { @Autowired private SearchService searchService; @Autowired private EntityService entityService; @Autowired private AuthorizerChain authorizationChain; - @Autowired private boolean restApiAuthorizationEnabled; @Autowired private ObjectMapper objectMapper; @Qualifier("systemOperationContext") @@ -109,18 +114,20 @@ public ResponseEntity> getEntities( throws URISyntaxException { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - Authentication authentication = AuthenticationContext.getAuthentication(); - if (restApiAuthorizationEnabled) { - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + + if (!AuthUtil.isAPIAuthorizedEntityType(authentication, authorizationChain, READ, entityName)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); } + OperationContext opContext = OperationContext.asSession( - systemOperationContext, authorizationChain, authentication, true); + systemOperationContext, + RequestContext.builder().buildOpenapi("getEntities", entityName), + authorizationChain, + authentication, + true); 
// TODO: support additional and multiple sort params SortCriterion sortCriterion = SearchUtil.sortBy(sortField, SortOrder.valueOf(sortOrder)); @@ -136,6 +143,11 @@ public ResponseEntity> getEntities( null, count); + if (!AuthUtil.isAPIAuthorizedResult(authentication, authorizationChain, result)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); + } + return ResponseEntity.ok( GenericScrollResult.builder() .results(toRecordTemplates(result.getEntities(), aspectNames, withSystemMetadata)) @@ -150,19 +162,12 @@ public ResponseEntity getEntityBatch( @PathVariable("entityName") String entityName, @RequestBody BatchGetUrnRequest request) throws URISyntaxException { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - request - .getUrns() - .forEach( - entityUrn -> - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType()))); + List urns = request.getUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns(authentication, authorizationChain, READ, urns)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); } return ResponseEntity.of( @@ -171,9 +176,7 @@ public ResponseEntity getEntityBatch( .entities( new ArrayList<>( toRecordTemplates( - request.getUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()), + urns, new HashSet<>(request.getAspectNames()), request.isWithSystemMetadata()))) .build())); @@ -191,21 +194,16 @@ public ResponseEntity getEntity( Boolean withSystemMetadata) throws URISyntaxException { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + Urn urn = UrnUtils.getUrn(entityUrn); + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, READ, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); } return ResponseEntity.of( - toRecordTemplates(List.of(UrnUtils.getUrn(entityUrn)), aspectNames, withSystemMetadata) - .stream() - .findFirst()); + toRecordTemplates(List.of(urn), aspectNames, withSystemMetadata).stream().findFirst()); } @Tag(name = "Generic Entities") @@ -216,18 +214,15 @@ public ResponseEntity getEntity( public ResponseEntity headEntity( @PathVariable("entityName") String entityName, @PathVariable("entityUrn") String entityUrn) { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + Urn urn = UrnUtils.getUrn(entityUrn); + Authentication authentication = 
AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, EXISTS, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + EXISTS + " entities."); } - return exists(UrnUtils.getUrn(entityUrn), null) + return exists(urn, null) ? ResponseEntity.noContent().build() : ResponseEntity.notFound().build(); } @@ -243,19 +238,16 @@ public ResponseEntity getAspect( @PathVariable("aspectName") String aspectName) throws URISyntaxException { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + Urn urn = UrnUtils.getUrn(entityUrn); + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, READ, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); } return ResponseEntity.of( - toRecordTemplates(List.of(UrnUtils.getUrn(entityUrn)), Set.of(aspectName), true).stream() + toRecordTemplates(List.of(urn), Set.of(aspectName), true).stream() .findFirst() .flatMap(e -> e.getAspects().values().stream().findFirst())); } @@ -270,18 +262,15 @@ public ResponseEntity headAspect( @PathVariable("entityUrn") String entityUrn, @PathVariable("aspectName") String aspectName) { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + Urn urn = UrnUtils.getUrn(entityUrn); + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, EXISTS, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + EXISTS + " entities."); } - return exists(UrnUtils.getUrn(entityUrn), aspectName) + return exists(urn, aspectName) ? 
ResponseEntity.noContent().build() : ResponseEntity.notFound().build(); } @@ -293,15 +282,12 @@ public void deleteEntity( @PathVariable("entityName") String entityName, @PathVariable("entityUrn") String entityUrn) { EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())); + Urn urn = UrnUtils.getUrn(entityUrn); + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, DELETE, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + DELETE + " entities."); } entityService.deleteAspect(entityUrn, entitySpec.getKeyAspectName(), Map.of(), true); @@ -315,15 +301,12 @@ public void deleteAspect( @PathVariable("entityUrn") String entityUrn, @PathVariable("aspectName") String aspectName) { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())); + Urn urn = UrnUtils.getUrn(entityUrn); + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, DELETE, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + DELETE + " entities."); } entityService.deleteAspect(entityUrn, aspectName, Map.of(), true); @@ -340,24 +323,24 @@ public ResponseEntity createAspect( @PathVariable("aspectName") String aspectName, @RequestParam(value = "systemMetadata", required = false, defaultValue = "false") Boolean withSystemMetadata, + @RequestParam(value = "createIfNotExists", required = false, defaultValue = "false") + Boolean createIfNotExists, @RequestBody @Nonnull String jsonAspect) throws URISyntaxException { + Urn urn = UrnUtils.getUrn(entityUrn); EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); Authentication authentication = AuthenticationContext.getAuthentication(); - if (restApiAuthorizationEnabled) { - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, CREATE, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + CREATE + " entities."); } AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); ChangeMCP upsert = - toUpsertItem(UrnUtils.getUrn(entityUrn), aspectSpec, jsonAspect, authentication.getActor()); + toUpsertItem(urn, aspectSpec, createIfNotExists, jsonAspect, authentication.getActor()); List results = entityService.ingestAspects( @@ -403,20 +386,16 @@ public ResponseEntity patchAspect( InstantiationException, IllegalAccessException { + Urn urn = UrnUtils.getUrn(entityUrn); EntitySpec entitySpec = entityRegistry.getEntitySpec(entityName); Authentication authentication = AuthenticationContext.getAuthentication(); - - if 
(restApiAuthorizationEnabled) { - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpec, - entityUrn, - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + if (!AuthUtil.isAPIAuthorizedEntityUrns( + authentication, authorizationChain, UPDATE, List.of(urn))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + UPDATE + " entities."); } - RecordTemplate currentValue = - entityService.getAspect(UrnUtils.getUrn(entityUrn), aspectName, 0); + RecordTemplate currentValue = entityService.getAspect(urn, aspectName, 0); AspectSpec aspectSpec = entitySpec.getAspectSpec(aspectName); GenericPatchTemplate genericPatchTemplate = @@ -537,11 +516,16 @@ private RecordTemplate toRecordTemplate(AspectSpec aspectSpec, EnvelopedAspect e } private ChangeMCP toUpsertItem( - Urn entityUrn, AspectSpec aspectSpec, String jsonAspect, Actor actor) + Urn entityUrn, + AspectSpec aspectSpec, + Boolean createIfNotExists, + String jsonAspect, + Actor actor) throws URISyntaxException { return ChangeItemImpl.builder() .urn(entityUrn) .aspectName(aspectSpec.getName()) + .changeType(Boolean.TRUE.equals(createIfNotExists) ? ChangeType.CREATE : ChangeType.UPSERT) .auditStamp(AuditStampUtils.createAuditStamp(actor.toUrnStr())) .recordTemplate( GenericRecordUtils.deserializeAspect( diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java index 09bd9f8bb09e5..2ac16e8c273db 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/RelationshipController.java @@ -1,19 +1,18 @@ package io.datahubproject.openapi.v2.controller; -import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; +import static com.linkedin.metadata.authorization.ApiGroup.RELATIONSHIP; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.graph.Edge; import com.linkedin.metadata.graph.RelatedEntities; import com.linkedin.metadata.graph.RelatedEntitiesScrollResult; import com.linkedin.metadata.graph.elastic.ElasticSearchGraphService; -import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.RelationshipDirection; import com.linkedin.metadata.query.filter.RelationshipFilter; @@ -21,13 +20,13 @@ import com.linkedin.metadata.query.filter.SortOrder; import com.linkedin.metadata.search.utils.QueryUtils; import com.linkedin.metadata.utils.SearchUtil; +import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.v2.models.GenericRelationship; import io.datahubproject.openapi.v2.models.GenericScrollResult; import io.swagger.v3.oas.annotations.Operation; import io.swagger.v3.oas.annotations.tags.Tag; import java.util.Arrays; import java.util.List; -import 
java.util.Set; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -65,8 +64,6 @@ public class RelationshipController { @Autowired private ElasticSearchGraphService graphService; @Autowired private AuthorizerChain authorizationChain; - @Autowired private boolean restApiAuthorizationEnabled; - /** * Returns relationship edges by type * @@ -82,6 +79,16 @@ public ResponseEntity> getRelationships @RequestParam(value = "count", defaultValue = "10") Integer count, @RequestParam(value = "scrollId", required = false) String scrollId) { + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorized(authentication, authorizationChain, RELATIONSHIP, READ)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + + " is unauthorized to " + + READ + + " " + + RELATIONSHIP); + } + RelatedEntitiesScrollResult result = graphService.scrollRelatedEntities( null, @@ -96,24 +103,24 @@ public ResponseEntity> getRelationships null, null); - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - Set entitySpecs = - result.getEntities().stream() - .flatMap( - relatedEntity -> - Stream.of( - entityRegistry.getEntitySpec( - UrnUtils.getUrn(relatedEntity.getUrn()).getEntityType()), - entityRegistry.getEntitySpec( - UrnUtils.getUrn(relatedEntity.getSourceUrn()).getEntityType()))) - .collect(Collectors.toSet()); - - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpecs, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + if (!AuthUtil.isAPIAuthorizedUrns( + authentication, + authorizationChain, + RELATIONSHIP, + READ, + result.getEntities().stream() + .flatMap( + edge -> + Stream.of( + UrnUtils.getUrn(edge.getSourceUrn()), + UrnUtils.getUrn(edge.getDestinationUrn()))) + .collect(Collectors.toSet()))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + + " is unauthorized to " + + READ + + " " + + RELATIONSHIP); } return ResponseEntity.ok( @@ -145,6 +152,21 @@ public ResponseEntity> getRelationships final RelatedEntitiesScrollResult result; + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedUrns( + authentication, + authorizationChain, + RELATIONSHIP, + READ, + List.of(UrnUtils.getUrn(entityUrn)))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + + " is unauthorized to " + + READ + + " " + + RELATIONSHIP); + } + switch (RelationshipDirection.valueOf(direction.toUpperCase())) { case INCOMING -> result = graphService.scrollRelatedEntities( @@ -183,24 +205,24 @@ public ResponseEntity> getRelationships default -> throw new IllegalArgumentException("Direction must be INCOMING or OUTGOING"); } - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - Set entitySpecs = - result.getEntities().stream() - .flatMap( - relatedEntity -> - Stream.of( - entityRegistry.getEntitySpec( - UrnUtils.getUrn(relatedEntity.getDestinationUrn()).getEntityType()), - entityRegistry.getEntitySpec( - UrnUtils.getUrn(relatedEntity.getSourceUrn()).getEntityType()))) - .collect(Collectors.toSet()); - - checkAuthorized( - authorizationChain, - authentication.getActor(), - entitySpecs, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + if (!AuthUtil.isAPIAuthorizedUrns( + authentication, + authorizationChain, + RELATIONSHIP, + READ, + 
result.getEntities().stream() + .flatMap( + edge -> + Stream.of( + UrnUtils.getUrn(edge.getSourceUrn()), + UrnUtils.getUrn(edge.getDestinationUrn()))) + .collect(Collectors.toSet()))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + + " is unauthorized to " + + READ + + " " + + RELATIONSHIP); } return ResponseEntity.ok( diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java index ab12b68339011..9aa49ea0582fc 100644 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java +++ b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/controller/TimeseriesController.java @@ -1,12 +1,13 @@ package io.datahubproject.openapi.v2.controller; -import static io.datahubproject.openapi.v2.utils.ControllerUtil.checkAuthorized; +import static com.linkedin.metadata.authorization.ApiGroup.TIMESERIES; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.AuthorizerChain; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.authorization.PoliciesConfig; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.metadata.query.filter.SortCriterion; @@ -15,6 +16,7 @@ import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.metadata.timeseries.TimeseriesScrollResult; import com.linkedin.metadata.utils.SearchUtil; +import io.datahubproject.openapi.exception.UnauthorizedException; import io.datahubproject.openapi.v2.models.GenericScrollResult; import io.datahubproject.openapi.v2.models.GenericTimeseriesAspect; import io.swagger.v3.oas.annotations.tags.Tag; @@ -47,8 +49,6 @@ public class TimeseriesController { @Autowired private AuthorizerChain authorizationChain; - @Autowired private boolean restApiAuthorizationEnabled; - @GetMapping(value = "/{entityName}/{aspectName}", produces = MediaType.APPLICATION_JSON_VALUE) public ResponseEntity> getAspects( @PathVariable("entityName") String entityName, @@ -61,13 +61,12 @@ public ResponseEntity> getAspects( Boolean withSystemMetadata) throws URISyntaxException { - if (restApiAuthorizationEnabled) { - Authentication authentication = AuthenticationContext.getAuthentication(); - checkAuthorized( - authorizationChain, - authentication.getActor(), - entityRegistry.getEntitySpec(entityName), - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE.getType())); + Authentication authentication = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorized(authentication, authorizationChain, TIMESERIES, READ) + || !AuthUtil.isAPIAuthorizedEntityType( + authentication, authorizationChain, READ, entityName)) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " " + TIMESERIES); } AspectSpec aspectSpec = entityRegistry.getEntitySpec(entityName).getAspectSpec(aspectName); @@ -91,6 +90,18 @@ public ResponseEntity> getAspects( startTimeMillis, endTimeMillis); + if (!AuthUtil.isAPIAuthorizedUrns( + authentication, + authorizationChain, + TIMESERIES, + READ, + 
result.getDocuments().stream() + .map(doc -> UrnUtils.getUrn(doc.getUrn())) + .collect(Collectors.toSet()))) { + throw new UnauthorizedException( + authentication.getActor().toUrnStr() + " is unauthorized to " + READ + " entities."); + } + return ResponseEntity.ok( GenericScrollResult.builder() .scrollId(result.getScrollId()) diff --git a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java b/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java deleted file mode 100644 index 70d588721d3b3..0000000000000 --- a/metadata-service/openapi-servlet/src/main/java/io/datahubproject/openapi/v2/utils/ControllerUtil.java +++ /dev/null @@ -1,67 +0,0 @@ -package io.datahubproject.openapi.v2.utils; - -import com.datahub.authentication.Actor; -import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.models.EntitySpec; -import io.datahubproject.openapi.exception.UnauthorizedException; -import java.util.List; -import java.util.Optional; -import java.util.Set; -import java.util.stream.Collectors; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -public class ControllerUtil { - private ControllerUtil() {} - - public static void checkAuthorized( - @Nonnull Authorizer authorizationChain, - @Nonnull Actor actor, - @Nonnull EntitySpec entitySpec, - @Nonnull List privileges) { - checkAuthorized(authorizationChain, actor, entitySpec, null, privileges); - } - - public static void checkAuthorized( - @Nonnull Authorizer authorizationChain, - @Nonnull Actor actor, - @Nonnull Set entitySpecs, - @Nonnull List privileges) { - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(privileges))); - List> resourceSpecs = - entitySpecs.stream() - .map( - entitySpec -> - Optional.of(new com.datahub.authorization.EntitySpec(entitySpec.getName(), ""))) - .collect(Collectors.toList()); - if (!AuthUtil.isAuthorizedForResources( - authorizationChain, actor.toUrnStr(), resourceSpecs, orGroup)) { - throw new UnauthorizedException(actor.toUrnStr() + " is unauthorized to get entities."); - } - } - - public static void checkAuthorized( - @Nonnull Authorizer authorizationChain, - @Nonnull Actor actor, - @Nonnull EntitySpec entitySpec, - @Nullable String entityUrn, - @Nonnull List privileges) { - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup(ImmutableList.of(new ConjunctivePrivilegeGroup(privileges))); - - List> resourceSpecs = - List.of( - Optional.of( - new com.datahub.authorization.EntitySpec( - entitySpec.getName(), entityUrn != null ? 
entityUrn : ""))); - if (!AuthUtil.isAuthorizedForResources( - authorizationChain, actor.toUrnStr(), resourceSpecs, orGroup)) { - throw new UnauthorizedException(actor.toUrnStr() + " is unauthorized to get entities."); - } - } -} diff --git a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java index 4cecfe2128199..5af7e752ef059 100644 --- a/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java +++ b/metadata-service/openapi-servlet/src/test/java/entities/EntitiesControllerTest.java @@ -214,7 +214,7 @@ public void testIngestDataset() { .build(); datasetAspects.add(glossaryTerms); - _entitiesController.postEntities(datasetAspects, false); + _entitiesController.postEntities(datasetAspects, false, false, false); } // @Test diff --git a/metadata-service/openapi-servlet/src/test/resources/application.properties b/metadata-service/openapi-servlet/src/test/resources/application.properties index cb5f16b3b6558..d08fd96a19af9 100644 --- a/metadata-service/openapi-servlet/src/test/resources/application.properties +++ b/metadata-service/openapi-servlet/src/test/resources/application.properties @@ -9,5 +9,5 @@ authentication.enabled=false spring.main.allow-bean-definition-overriding=true #--- spring.config.activate.on-profile=test -server.port=53222 -local.server.port=53222 +server.port=53223 +local.server.port=53223 diff --git a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json index 7284cd2bac48f..6bd35783da614 100644 --- a/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json +++ b/metadata-service/restli-api/src/main/idl/com.linkedin.entity.runs.restspec.json @@ -36,7 +36,7 @@ }, { "name" : "list", "javaMethodName" : "list", - "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", + "doc" : "Retrieves the ingestion run summaries.", "parameters" : [ { "name" : "pageOffset", "type" : "int", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json index 4375dfa587e7d..2a96e9963bf01 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.aspects.snapshot.json @@ -1231,12 +1231,12 @@ "items" : "Urn" } }, - "doc" : "Owners to ownership type map, populated with mutation hook.", + "doc" : "Ownership type to Owners map, populated via mutation hook.", "default" : { }, "optional" : true, "Searchable" : { "/*" : { - "fieldType" : "OBJECT", + "fieldType" : "MAP_ARRAY", "queryByDefault" : false } } @@ -2070,11 +2070,12 @@ "name" : "ChangeType", "namespace" : "com.linkedin.events.metadata", "doc" : "Descriptor for a change action", - "symbols" : [ "UPSERT", "CREATE", "UPDATE", "DELETE", "PATCH", "RESTATE" ], + "symbols" : [ "UPSERT", "CREATE", "UPDATE", "DELETE", "PATCH", "RESTATE", "CREATE_ENTITY" ], "symbolDocs" : { - "CREATE" : "NOT SUPPORTED YET\ninsert if not exists. otherwise fail", - "DELETE" : "NOT SUPPORTED YET\ndelete action", - "PATCH" : "NOT SUPPORTED YET\npatch the changes instead of full replace", + "CREATE" : "insert if not exists. otherwise fail", + "CREATE_ENTITY" : "insert if entity not exists. 
otherwise fail", + "DELETE" : "delete action", + "PATCH" : "patch the changes instead of full replace", "RESTATE" : "Restate an aspect, eg. in a index refresh.", "UPDATE" : "NOT SUPPORTED YET\nupdate if exists. otherwise fail", "UPSERT" : "insert if not exists. otherwise update" diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json index 4a70fb9a3a6ad..6ba738965a006 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.entities.snapshot.json @@ -1267,12 +1267,12 @@ "items" : "Urn" } }, - "doc" : "Owners to ownership type map, populated with mutation hook.", + "doc" : "Ownership type to Owners map, populated via mutation hook.", "default" : { }, "optional" : true, "Searchable" : { "/*" : { - "fieldType" : "OBJECT", + "fieldType" : "MAP_ARRAY", "queryByDefault" : false } } diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json index 2168ee950957a..6100073f1fbc9 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.entity.runs.snapshot.json @@ -973,12 +973,12 @@ "items" : "Urn" } }, - "doc" : "Owners to ownership type map, populated with mutation hook.", + "doc" : "Ownership type to Owners map, populated via mutation hook.", "default" : { }, "optional" : true, "Searchable" : { "/*" : { - "fieldType" : "OBJECT", + "fieldType" : "MAP_ARRAY", "queryByDefault" : false } } @@ -3816,7 +3816,7 @@ }, { "name" : "list", "javaMethodName" : "list", - "doc" : "Retrieves the value for an entity that is made up of latest versions of specified aspects.", + "doc" : "Retrieves the ingestion run summaries.", "parameters" : [ { "name" : "pageOffset", "type" : "int", diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json index 2c093f753e3a6..d7199bed56d2c 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.operations.operations.snapshot.json @@ -973,12 +973,12 @@ "items" : "Urn" } }, - "doc" : "Owners to ownership type map, populated with mutation hook.", + "doc" : "Ownership type to Owners map, populated via mutation hook.", "default" : { }, "optional" : true, "Searchable" : { "/*" : { - "fieldType" : "OBJECT", + "fieldType" : "MAP_ARRAY", "queryByDefault" : false } } diff --git a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json index 04fbf98ebc36c..4a1f24d527b89 100644 --- a/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json +++ b/metadata-service/restli-api/src/main/snapshot/com.linkedin.platform.platform.snapshot.json @@ -1267,12 +1267,12 @@ "items" : "Urn" } }, - "doc" : "Owners to ownership type map, populated with mutation hook.", + "doc" : "Ownership type to Owners map, populated via mutation hook.", "default" : { }, "optional" : true, "Searchable" : { "/*" : { - "fieldType" : 
"OBJECT", + "fieldType" : "MAP_ARRAY", "queryByDefault" : false } } diff --git a/metadata-service/restli-client-api/build.gradle b/metadata-service/restli-client-api/build.gradle new file mode 100644 index 0000000000000..c2dfd4d2a1344 --- /dev/null +++ b/metadata-service/restli-client-api/build.gradle @@ -0,0 +1,14 @@ +plugins { + id 'java-library' +} + +dependencies { + api project(path: ':metadata-service:restli-api', configuration: 'restClient') + api project(':metadata-operation-context') + implementation project(':metadata-service:configuration') + + implementation externalDependency.caffeine + implementation externalDependency.slf4jApi + compileOnly externalDependency.lombok + annotationProcessor externalDependency.lombok +} diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/common/client/BaseClient.java similarity index 100% rename from metadata-service/restli-client/src/main/java/com/linkedin/common/client/BaseClient.java rename to metadata-service/restli-client-api/src/main/java/com/linkedin/common/client/BaseClient.java diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/common/client/ClientCache.java similarity index 100% rename from metadata-service/restli-client/src/main/java/com/linkedin/common/client/ClientCache.java rename to metadata-service/restli-client-api/src/main/java/com/linkedin/common/client/ClientCache.java diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java similarity index 100% rename from metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClient.java rename to metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClient.java diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClientCache.java similarity index 100% rename from metadata-service/restli-client/src/main/java/com/linkedin/entity/client/EntityClientCache.java rename to metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/EntityClientCache.java diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/SystemEntityClient.java similarity index 100% rename from metadata-service/restli-client/src/main/java/com/linkedin/entity/client/SystemEntityClient.java rename to metadata-service/restli-client-api/src/main/java/com/linkedin/entity/client/SystemEntityClient.java diff --git a/metadata-service/restli-client-api/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/usage/UsageClient.java new file mode 100644 index 0000000000000..dbae8edb1a381 --- /dev/null +++ b/metadata-service/restli-client-api/src/main/java/com/linkedin/usage/UsageClient.java @@ -0,0 +1,13 @@ +package com.linkedin.usage; + +import com.linkedin.common.EntityRelationships; +import javax.annotation.Nonnull; + +public interface UsageClient { + /** + * Gets a specific version of downstream {@link EntityRelationships} for the given dataset. 
Using + * cache and system authentication. Validate permissions before use! + */ + @Nonnull + UsageQueryResult getUsageStats(@Nonnull String resource, @Nonnull UsageTimeRange range); +} diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java b/metadata-service/restli-client-api/src/main/java/com/linkedin/usage/UsageClientCache.java similarity index 100% rename from metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClientCache.java rename to metadata-service/restli-client-api/src/main/java/com/linkedin/usage/UsageClientCache.java diff --git a/metadata-service/restli-client/build.gradle b/metadata-service/restli-client/build.gradle index 9bee54da9ff6e..b08e75c8f28e6 100644 --- a/metadata-service/restli-client/build.gradle +++ b/metadata-service/restli-client/build.gradle @@ -10,6 +10,7 @@ dependencies { api project(':metadata-events:mxe-schemas') api project(':metadata-utils') api project(':metadata-operation-context') + api project(':metadata-service:restli-client-api') implementation project(':metadata-service:configuration') implementation externalDependency.caffeine diff --git a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java b/metadata-service/restli-client/src/main/java/com/linkedin/usage/RestliUsageClient.java similarity index 96% rename from metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java rename to metadata-service/restli-client/src/main/java/com/linkedin/usage/RestliUsageClient.java index 461c2e50fac54..2e523eb4965be 100644 --- a/metadata-service/restli-client/src/main/java/com/linkedin/usage/UsageClient.java +++ b/metadata-service/restli-client/src/main/java/com/linkedin/usage/RestliUsageClient.java @@ -12,7 +12,7 @@ import java.net.URISyntaxException; import javax.annotation.Nonnull; -public class UsageClient extends BaseClient { +public class RestliUsageClient extends BaseClient implements UsageClient { private static final UsageStatsRequestBuilders USAGE_STATS_REQUEST_BUILDERS = new UsageStatsRequestBuilders(); @@ -20,7 +20,7 @@ public class UsageClient extends BaseClient { private final OperationContext systemOperationContext; private final UsageClientCache usageClientCache; - public UsageClient( + public RestliUsageClient( @Nonnull OperationContext systemOperationContext, @Nonnull final Client restliClient, @Nonnull final BackoffPolicy backoffPolicy, diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java index b8cbf1ceb6794..bd64784f28d08 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/analytics/Analytics.java @@ -1,10 +1,16 @@ package com.linkedin.metadata.resources.analytics; +import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; +import com.datahub.plugins.auth.authorization.Authorizer; import com.linkedin.analytics.GetTimeseriesAggregatedStatsResponse; +import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.resources.restli.RestliUtils; import com.linkedin.metadata.timeseries.TimeseriesAspectService; import com.linkedin.parseq.Task; +import com.linkedin.restli.common.HttpStatus; +import 
com.linkedin.restli.server.RestLiServiceException; import com.linkedin.restli.server.annotations.Action; import com.linkedin.restli.server.annotations.ActionParam; import com.linkedin.restli.server.annotations.Optional; @@ -22,6 +28,10 @@ import javax.inject.Named; import lombok.extern.slf4j.Slf4j; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.linkedin.metadata.authorization.ApiGroup.TIMESERIES; +import static com.linkedin.metadata.authorization.ApiOperation.READ; + /** Rest.li entry point: /analytics */ @Slf4j @RestLiSimpleResource(name = "analytics", namespace = "com.linkedin.analytics") @@ -35,7 +45,11 @@ public class Analytics extends SimpleResourceTemplate getTimeseriesStats( @ActionParam(PARAM_BUCKETS) @Optional @Nullable GroupingBucket[] groupingBuckets) { return RestliUtils.toTask( () -> { + if (!AuthUtil.isAPIAuthorizedEntityType( + AuthenticationContext.getAuthentication(), + authorizer, + TIMESERIES, READ, + entityName)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity " + entityName); + } + log.info("Attempting to query timeseries stats"); GetTimeseriesAggregatedStatsResponse resp = new GetTimeseriesAggregatedStatsResponse(); resp.setEntityName(entityName); @@ -60,7 +83,7 @@ public Task getTimeseriesStats( } GenericTable aggregatedStatsTable = - _timeseriesAspectService.getAggregatedStats( + timeseriesAspectService.getAggregatedStats( entityName, aspectName, aggregationSpecs, filter, groupingBuckets); resp.setTable(aggregatedStatsTable); return resp; diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java index 8658096b17437..a84afefb57c82 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/AspectResource.java @@ -1,9 +1,14 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedEntityUrns; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedUrns; +import static com.linkedin.metadata.authorization.ApiGroup.COUNTS; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiGroup.TIMESERIES; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static com.linkedin.metadata.resources.operations.OperationsResource.*; import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; @@ -11,12 +16,12 @@ import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; import com.linkedin.aspect.GetTimeseriesAspectValuesResponse; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.aspect.EnvelopedAspectArray; import com.linkedin.metadata.aspect.VersionedAspect; +import com.linkedin.metadata.authorization.Disjunctive; import 
com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.IngestResult; @@ -29,7 +34,6 @@ import com.linkedin.metadata.restli.RestliUtil; import com.linkedin.metadata.search.EntitySearchService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import com.linkedin.metadata.utils.EntityKeyUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; import com.linkedin.parseq.Task; @@ -43,11 +47,13 @@ import com.linkedin.restli.server.annotations.RestLiCollection; import com.linkedin.restli.server.annotations.RestMethod; import com.linkedin.restli.server.resources.CollectionResourceTaskTemplate; +import com.linkedin.util.Pair; import io.opentelemetry.extension.annotations.WithSpan; import java.net.URISyntaxException; import java.time.Clock; import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; import javax.inject.Inject; @@ -86,11 +92,11 @@ void setEntityService(EntityService entityService) { @Inject @Named("entitySearchService") - private EntitySearchService _entitySearchService; + private EntitySearchService entitySearchService; @Inject @Named("timeseriesAspectService") - private TimeseriesAspectService _timeseriesAspectService; + private TimeseriesAspectService timeseriesAspectService; @Inject @Named("authorizerChain") @@ -117,15 +123,13 @@ public Task get( final Urn urn = Urn.createFromString(urnStr); return RestliUtil.toTask( () -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { + READ, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect for " + urn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get aspect for " + urn); } final VersionedAspect aspect = _entityService.getVersionedAspect(urn, aspectName, version); @@ -164,15 +168,13 @@ public Task getTimeseriesAspectValues( final Urn urn = Urn.createFromString(urnStr); return RestliUtil.toTask( () -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + if (!isAPIAuthorizedUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_TIMESERIES_ASPECT_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { + TIMESERIES, READ, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get timeseries aspect for " + urn); } GetTimeseriesAspectValuesResponse response = new GetTimeseriesAspectValuesResponse(); @@ -191,7 +193,7 @@ public Task getTimeseriesAspectValues( } response.setValues( new EnvelopedAspectArray( - _timeseriesAspectService.getAspectValues( + timeseriesAspectService.getAspectValues( urn, entityName, aspectName, @@ -222,19 +224,21 @@ public Task ingestProposal( } Authentication authentication = AuthenticationContext.getAuthentication(); - com.linkedin.metadata.models.EntitySpec entitySpec = - 
_entityService.getEntityRegistry().getEntitySpec(metadataChangeProposal.getEntityType()); - Urn urn = - EntityKeyUtils.getUrnFromProposal(metadataChangeProposal, entitySpec.getKeyAspectSpec()); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, - _authorizer, - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { - throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to modify entity " + urn); - } + + /* + Ingest Authorization Checks + */ + List> exceptions = isAPIAuthorized(authentication, _authorizer, ENTITY, + _entityService.getEntityRegistry(), List.of(metadataChangeProposal)) + .stream().filter(p -> p.getSecond() != HttpStatus.S_200_OK.getCode()) + .collect(Collectors.toList()); + if (!exceptions.isEmpty()) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to modify entity: " + exceptions.stream() + .map(ex -> String.format("HttpStatus: %s Urn: %s", ex.getSecond(), ex.getFirst().getEntityUrn())) + .collect(Collectors.toList())); + } + String actorUrnStr = authentication.getActor().toUrnStr(); final AuditStamp auditStamp = new AuditStamp().setTime(_clock.millis()).setActor(Urn.createFromString(actorUrnStr)); @@ -255,7 +259,7 @@ public Task ingestProposal( Urn resultUrn = one.getUrn(); if (one.isProcessedMCL() || one.isUpdate()) { tryIndexRunId( - resultUrn, metadataChangeProposal.getSystemMetadata(), _entitySearchService); + resultUrn, metadataChangeProposal.getSystemMetadata(), entitySearchService); } return resultUrn.toString(); } catch (ValidationException e) { @@ -273,15 +277,12 @@ public Task getCount( @ActionParam(PARAM_URN_LIKE) @Optional @Nullable String urnLike) { return RestliUtil.toTask( () -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), - (EntitySpec) null)) { + COUNTS, READ)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get aspect counts."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get aspect counts."); } return _entityService.getCountAspect(aspectName, urnLike); }, @@ -299,6 +300,13 @@ public Task restoreIndices( @ActionParam("batchSize") @Optional @Nullable Integer batchSize) { return RestliUtil.toTask( () -> { + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), + _authorizer, + PoliciesConfig.RESTORE_INDICES_PRIVILEGE)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to update entities."); + } return Utils.restoreIndices( aspectName, urn, urnLike, start, batchSize, _authorizer, _entityService); }, diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java index 869cfc7afdee8..dcb5cd8ab01c1 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/BatchIngestionRunResource.java @@ -49,11 +49,11 @@ public 
class BatchIngestionRunResource @Inject @Named("systemMetadataService") - private SystemMetadataService _systemMetadataService; + private SystemMetadataService systemMetadataService; @Inject @Named("entityService") - private EntityService _entityService; + private EntityService entityService; @Inject @Named("rollbackService") @@ -61,7 +61,7 @@ public class BatchIngestionRunResource @Inject @Named("authorizerChain") - private Authorizer _authorizer; + private Authorizer authorizer; /** Rolls back an ingestion run */ @Action(name = "rollback") @@ -88,10 +88,10 @@ public Task rollback( Authentication auth = AuthenticationContext.getAuthentication(); try { - return rollbackService.rollbackIngestion(runId, dryRun, doHardDelete, _authorizer, auth); + return rollbackService.rollbackIngestion(runId, dryRun, doHardDelete, authorizer, auth); } catch (AuthenticationException authException) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, authException.getMessage()); + HttpStatus.S_403_FORBIDDEN, authException.getMessage()); } }, MetricRegistry.name(this.getClass(), "rollback")); @@ -102,7 +102,7 @@ public Task rollback( } } - /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */ + /** Retrieves the ingestion run summaries. */ @Action(name = "list") @Nonnull @WithSpan @@ -115,7 +115,7 @@ public Task list( return RestliUtil.toTask( () -> { List summaries = - _systemMetadataService.listRuns( + systemMetadataService.listRuns( pageOffset != null ? pageOffset : DEFAULT_OFFSET, pageSize != null ? pageSize : DEFAULT_PAGE_SIZE, includeSoft != null ? includeSoft : DEFAULT_INCLUDE_SOFT_DELETED); @@ -139,7 +139,7 @@ public Task describe( return RestliUtil.toTask( () -> { List summaries = - _systemMetadataService.findByRunId( + systemMetadataService.findByRunId( runId, includeSoft != null && includeSoft, start, count); if (includeAspect != null && includeAspect) { @@ -148,7 +148,7 @@ public Task describe( Urn urn = UrnUtils.getUrn(summary.getUrn()); try { EnvelopedAspect aspect = - _entityService.getLatestEnvelopedAspect( + entityService.getLatestEnvelopedAspect( urn.getEntityType(), urn, summary.getAspectName()); if (aspect == null) { log.error("Aspect for summary {} not found", summary); diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java index 27620e0b49d0c..48e680599f447 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityResource.java @@ -1,19 +1,28 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; +import static com.datahub.authorization.AuthUtil.*; +import static com.linkedin.metadata.authorization.ApiGroup.COUNTS; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiGroup.TIMESERIES; +import static com.linkedin.metadata.authorization.ApiOperation.CREATE; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.EXISTS; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static 
com.linkedin.metadata.entity.validation.ValidationUtils.*; import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; import static com.linkedin.metadata.search.utils.SearchUtils.*; -import static com.linkedin.metadata.shared.ValidationUtils.*; import static com.linkedin.metadata.utils.PegasusUtils.*; import com.codahale.metrics.MetricRegistry; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.AuthUtil; import com.datahub.authorization.EntitySpec; +import com.linkedin.metadata.authorization.Disjunctive; +import io.datahubproject.metadata.context.RequestContext; +import io.datahubproject.metadata.services.RestrictedService; import io.datahubproject.metadata.context.OperationContext; -import io.datahubproject.metadata.context.OperationContextConfig; import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -123,48 +132,52 @@ public class EntityResource extends CollectionResourceTaskTemplate _entityService; + private EntityService entityService; @Inject @Named("searchService") - private SearchService _searchService; + private SearchService searchService; @Inject @Named("entitySearchService") - private EntitySearchService _entitySearchService; + private EntitySearchService entitySearchService; @Inject @Named("systemMetadataService") - private SystemMetadataService _systemMetadataService; + private SystemMetadataService systemMetadataService; @Inject @Named("relationshipSearchService") - private LineageSearchService _lineageSearchService; + private LineageSearchService lineageSearchService; @Inject @Named("kafkaEventProducer") - private EventProducer _eventProducer; + private EventProducer eventProducer; @Inject @Named("graphService") - private GraphService _graphService; + private GraphService graphService; @Inject @Named("deleteEntityService") - private DeleteEntityService _deleteEntityService; + private DeleteEntityService deleteEntityService; @Inject @Named("timeseriesAspectService") - private TimeseriesAspectService _timeseriesAspectService; + private TimeseriesAspectService timeseriesAspectService; @Inject @Named("authorizerChain") - private Authorizer _authorizer; + private Authorizer authorizer; @Inject @Named("systemOperationContext") private OperationContext systemOperationContext; + @Inject + @Named("restrictedService") + private RestrictedService restrictedService; + /** Retrieves the value for an entity that is made up of latest versions of specified aspects. */ @RestMethod.Get @Nonnull @@ -174,15 +187,13 @@ public Task get( throws URISyntaxException { log.info("GET {}", urnStr); final Urn urn = Urn.createFromString(urnStr); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + READ, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity " + urn); } return RestliUtil.toTask( () -> { @@ -190,7 +201,7 @@ public Task get( aspectNames == null ? 
Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames)); - final Entity entity = _entityService.getEntity(urn, projectedAspects); + final Entity entity = entityService.getEntity(urn, projectedAspects); if (entity == null) { throw RestliUtil.resourceNotFoundException(String.format("Did not find %s", urnStr)); } @@ -211,19 +222,14 @@ public Task> batchGet( for (final String urnStr : urnStrs) { urns.add(Urn.createFromString(urnStr)); } - List> resourceSpecs = - urns.stream() - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - resourceSpecs)) { + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + READ, + urns)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities: " + urnStrs); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entities: " + urnStrs); } return RestliUtil.toTask( () -> { @@ -231,7 +237,7 @@ public Task> batchGet( aspectNames == null ? Collections.emptySet() : new HashSet<>(Arrays.asList(aspectNames)); - return _entityService.getEntities(urns, projectedAspects).entrySet().stream() + return entityService.getEntities(urns, projectedAspects).entrySet().stream() .collect( Collectors.toMap( entry -> entry.getKey().toString(), @@ -263,14 +269,13 @@ public Task ingest( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); final Urn urn = com.datahub.util.ModelUtils.getUrnFromSnapshotUnion(entity.getValue()); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + if (!isAPIAuthorizedEntityUrns( authentication, - _authorizer, - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { + authorizer, + CREATE, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entity " + urn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to edit entity " + urn); } try { @@ -288,7 +293,7 @@ public Task ingest( final SystemMetadata finalSystemMetadata = systemMetadata; return RestliUtil.toTask( () -> { - _entityService.ingestEntity(entity, auditStamp, finalSystemMetadata); + entityService.ingestEntity(entity, auditStamp, finalSystemMetadata); return null; }, MetricRegistry.name(this.getClass(), "ingest")); @@ -304,20 +309,16 @@ public Task batchIngest( Authentication authentication = AuthenticationContext.getAuthentication(); String actorUrnStr = authentication.getActor().toUrnStr(); - List> resourceSpecs = - Arrays.stream(entities) - .map(Entity::getValue) - .map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + + if (!isAPIAuthorizedEntityUrns( authentication, - _authorizer, - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - resourceSpecs)) { + authorizer, + CREATE, + Arrays.stream(entities) + .map(Entity::getValue) + 
.map(com.datahub.util.ModelUtils::getUrnFromSnapshotUnion).collect(Collectors.toList()))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to edit entities."); } for (Entity entity : entities) { @@ -346,7 +347,7 @@ public Task batchIngest( return RestliUtil.toTask( () -> { - _entityService.ingestEntities( + entityService.ingestEntities( Arrays.asList(entities), auditStamp, finalSystemMetadataList); return null; }, @@ -365,18 +366,20 @@ public Task search( @ActionParam(PARAM_COUNT) int count, @Optional @Deprecated @Nullable @ActionParam(PARAM_FULLTEXT) Boolean fulltext, @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, + entityName)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } - OperationContext opContext = OperationContext.asSession(systemOperationContext, _authorizer, auth, true); + OperationContext opContext = OperationContext.asSession(systemOperationContext, + RequestContext.builder().buildRestli(ACTION_SEARCH, entityName), authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(Boolean.TRUE.equals(fulltext))); log.info("GET SEARCH RESULTS for {} with query {}", entityName, input); // TODO - change it to use _searchService once we are confident on it's latency @@ -385,9 +388,18 @@ public Task search( final SearchResult result; // This API is not used by the frontend for search bars so we default to structured result = - _entitySearchService.search(opContext, + entitySearchService.search(opContext, List.of(entityName), input, filter, sortCriterion, start, count); - return validateSearchResult(result, _entityService); + + if (!isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized get entity."); + } + + return validateSearchResult(result, entityService); }, MetricRegistry.name(this.getClass(), "search")); } @@ -403,29 +415,36 @@ public Task searchAcrossEntities( @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + + final Authentication auth = AuthenticationContext.getAuthentication(); + OperationContext opContext = OperationContext.asSession( + systemOperationContext, RequestContext.builder().buildRestli(ACTION_SEARCH_ACROSS_ENTITIES, entities), authorizer, auth, true) + .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); + + List entityList = searchService.getEntitiesToSearch(opContext, entities == null ? 
Collections.emptyList() : Arrays.asList(entities), count); + if (!isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, + entityList)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } - OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) - .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); - List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); log.info("GET SEARCH RESULTS ACROSS ENTITIES for {} with query {}", entityList, input); return RestliUtil.toTask( - () -> - validateSearchResult( - _searchService.searchAcrossEntities(opContext, - entityList, input, filter, sortCriterion, start, count), - _entityService), - "searchAcrossEntities"); + () -> { + SearchResult result = searchService.searchAcrossEntities(opContext, entityList, input, filter, sortCriterion, start, count); + if (!isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized get entity."); + } + + return validateSearchResult(result, entityService); + }); } @Action(name = ACTION_SCROLL_ACROSS_ENTITIES) @@ -440,12 +459,21 @@ public Task scrollAcrossEntities( @ActionParam(PARAM_KEEP_ALIVE) String keepAlive, @ActionParam(PARAM_COUNT) int count, @ActionParam(PARAM_SEARCH_FLAGS) @Optional SearchFlags searchFlags) { - Authentication auth = AuthenticationContext.getAuthentication(); + + final Authentication auth = AuthenticationContext.getAuthentication(); OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) + systemOperationContext, RequestContext.builder().buildRestli(ACTION_SCROLL_ACROSS_ENTITIES, entities), authorizer, auth, true) .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); - List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); + List entityList = searchService.getEntitiesToSearch(opContext, entities == null ? 
Collections.emptyList() : Arrays.asList(entities), count); + if (!isAPIAuthorizedEntityType( + auth, + authorizer, + READ, entityList)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); + } + log.info( "GET SCROLL RESULTS ACROSS ENTITIES for {} with query {} and scroll ID: {}", entityList, @@ -453,18 +481,26 @@ public Task scrollAcrossEntities( scrollId); return RestliUtil.toTask( - () -> - validateScrollResult( - _searchService.scrollAcrossEntities( - opContext, - entityList, - input, - filter, - sortCriterion, - scrollId, - keepAlive, - count), - _entityService), + () -> { + ScrollResult result = searchService.scrollAcrossEntities( + opContext, + entityList, + input, + filter, + sortCriterion, + scrollId, + keepAlive, + count); + if (!isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized get entity."); + } + + return validateScrollResult(result, entityService); + }, "scrollAcrossEntities"); } @@ -486,20 +522,20 @@ public Task searchAcrossLineage( @Optional @Nullable @ActionParam(PARAM_SEARCH_FLAGS) SearchFlags searchFlags) throws URISyntaxException { - Authentication auth = AuthenticationContext.getAuthentication(); - OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) - .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)); - - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!isAPIAuthorized( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + LINEAGE, READ)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + + OperationContext opContext = OperationContext.asSession( + systemOperationContext, RequestContext.builder().buildRestli(ACTION_SEARCH_ACROSS_LINEAGE, entities), authorizer, auth, true) + .withSearchFlags(flags -> (searchFlags != null ? searchFlags : new SearchFlags().setFulltext(true)) + .setIncludeRestricted(true)); + Urn urn = Urn.createFromString(urnStr); List entityList = entities == null ? 
Collections.emptyList() : Arrays.asList(entities); log.info( @@ -509,10 +545,9 @@ public Task searchAcrossLineage( entityList, input); return RestliUtil.toTask( - () -> - validateLineageSearchResult( - _lineageSearchService.searchAcrossLineage( - opContext, + () -> { + LineageSearchResult result = validateLineageSearchResult(lineageSearchService.searchAcrossLineage( + opContext, urn, LineageDirection.valueOf(direction), entityList, @@ -523,8 +558,10 @@ public Task searchAcrossLineage( start, count, startTimeMillis, - endTimeMillis), - _entityService), + endTimeMillis), entityService); + + return result; + }, "searchAcrossRelationships"); } @@ -547,10 +584,19 @@ public Task scrollAcrossLineage( @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) throws URISyntaxException { - Authentication auth = AuthenticationContext.getAuthentication(); + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!isAPIAuthorized( + auth, + authorizer, + LINEAGE, READ)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); + } + OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) - .withSearchFlags(flags -> searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true)); + systemOperationContext, RequestContext.builder().buildRestli(ACTION_SCROLL_ACROSS_LINEAGE, entities), authorizer, auth, true) + .withSearchFlags(flags -> (searchFlags != null ? searchFlags : new SearchFlags().setSkipCache(true)) + .setIncludeRestricted(true)); Urn urn = Urn.createFromString(urnStr); List entityList = entities == null ? Collections.emptyList() : Arrays.asList(entities); @@ -562,23 +608,24 @@ public Task scrollAcrossLineage( input); return RestliUtil.toTask( - () -> - validateLineageScrollResult( - _lineageSearchService.scrollAcrossLineage( - opContext, - urn, - LineageDirection.valueOf(direction), - entityList, - input, - maxHops, - filter, - sortCriterion, - scrollId, - keepAlive, - count, - startTimeMillis, - endTimeMillis), - _entityService), + () -> { + LineageScrollResult result = validateLineageScrollResult(lineageSearchService.scrollAcrossLineage( + opContext, + urn, + LineageDirection.valueOf(direction), + entityList, + input, + maxHops, + filter, + sortCriterion, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis), entityService); + + return result; + }, "scrollAcrossLineage"); } @@ -592,27 +639,33 @@ public Task list( @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, entityName)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) + systemOperationContext, RequestContext.builder().buildRestli(ACTION_LIST, entityName), authorizer, auth, true) .withSearchFlags(flags -> new SearchFlags().setFulltext(false)); log.info("GET LIST RESULTS for {} with filter {}", entityName, filter); return 
RestliUtil.toTask( - () -> - validateListResult( - toListResult( - _entitySearchService.filter(opContext, entityName, filter, sortCriterion, start, count)), - _entityService), + () -> { + SearchResult result = entitySearchService.filter(opContext, entityName, filter, sortCriterion, start, count); + if (!AuthUtil.isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized get entity."); + } + return validateListResult( + toListResult(result), entityService); + }, MetricRegistry.name(this.getClass(), "filter")); } @@ -627,22 +680,30 @@ public Task autocomplete( @ActionParam(PARAM_LIMIT) int limit, @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, entityName)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) + systemOperationContext, RequestContext.builder().buildRestli(ACTION_AUTOCOMPLETE, entityName), authorizer, auth, true) .withSearchFlags(flags -> searchFlags != null ? searchFlags : flags); return RestliUtil.toTask( - () -> _entitySearchService.autoComplete(opContext, entityName, query, field, filter, limit), + () -> { + AutoCompleteResult result = entitySearchService.autoComplete(opContext, entityName, query, field, filter, limit); + if (!isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized get entity."); + } + return result; }, MetricRegistry.name(this.getClass(), "autocomplete")); } @@ -657,26 +718,34 @@ public Task browse( @ActionParam(PARAM_LIMIT) int limit, @ActionParam(PARAM_SEARCH_FLAGS) @Optional @Nullable SearchFlags searchFlags) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, entityName)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } + OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true) + systemOperationContext, RequestContext.builder().buildRestli(ACTION_BROWSE, entityName), authorizer, auth, true) .withSearchFlags(flags -> searchFlags != null ? 
searchFlags : flags); log.info("GET BROWSE RESULTS for {} at path {}", entityName, path); return RestliUtil.toTask( - () -> - validateBrowseResult( - _entitySearchService.browse(opContext, entityName, path, filter, start, limit), - _entityService), + () -> { + BrowseResult result = entitySearchService.browse(opContext, entityName, path, filter, start, limit); + if (!isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized get entity."); + } + return validateBrowseResult( + result, + entityService); + }, MetricRegistry.name(this.getClass(), "browse")); } @@ -685,19 +754,18 @@ public Task browse( @WithSpan public Task getBrowsePaths( @ActionParam(value = PARAM_URN, typeref = com.linkedin.common.Urn.class) @Nonnull Urn urn) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + READ, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity: " + urn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity: " + urn); } log.info("GET BROWSE PATHS for {}", urn); return RestliUtil.toTask( - () -> new StringArray(_entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)), + () -> new StringArray(entitySearchService.getBrowsePaths(urnToEntityName(urn), urn)), MetricRegistry.name(this.getClass(), "getBrowsePaths")); } @@ -740,7 +808,7 @@ public Task deleteEntities( () -> { RollbackResponse response = new RollbackResponse(); List aspectRowsToDelete = - _systemMetadataService.findByRegistry( + systemMetadataService.findByRegistry( finalRegistryName, finalRegistryVersion.toString(), false, @@ -752,23 +820,14 @@ public Task deleteEntities( aspectRowsToDelete.stream() .collect(Collectors.groupingBy(AspectRowSummary::getUrn)) .keySet(); - List> resourceSpecs = - urns.stream() - .map(UrnUtils::getUrn) - .map( - urn -> - java.util.Optional.of( - new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - resourceSpecs)) { + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + DELETE, + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toSet()))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entities."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to delete entities."); } response.setEntitiesAffected(urns.size()); response.setEntitiesDeleted( @@ -780,7 +839,7 @@ public Task deleteEntities( Map conditions = new HashMap(); conditions.put("registryName", finalRegistryName1); conditions.put("registryVersion", finalRegistryVersion1.toString()); - _entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false); + entityService.rollbackWithConditions(aspectRowsToDelete, conditions, false); } return response; }, @@ -808,23 +867,21 @@ public Task deleteEntity( @ActionParam(PARAM_END_TIME_MILLIS) @Optional Long 
endTimeMillis) throws URISyntaxException { Urn urn = Urn.createFromString(urnStr); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - Collections.singletonList( - java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + DELETE, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + urnStr); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to delete entity: " + urnStr); } return RestliUtil.toTask( () -> { // Find the timeseries aspects to delete. If aspectName is null, delete all. List timeseriesAspectNames = EntitySpecUtils.getEntityTimeseriesAspectNames( - _entityService.getEntityRegistry(), urn.getEntityType()); + entityService.getEntityRegistry(), urn.getEntityType()); if (aspectName != null && !timeseriesAspectNames.contains(aspectName)) { throw new UnsupportedOperationException( String.format("Not supported for non-timeseries aspect '{}'.", aspectName)); @@ -834,7 +891,7 @@ public Task deleteEntity( DeleteEntityResponse response = new DeleteEntityResponse(); if (aspectName == null) { - RollbackRunResult result = _entityService.deleteUrn(urn); + RollbackRunResult result = entityService.deleteUrn(urn); response.setRows(result.getRowsDeletedFromEntityDeletion()); } Long numTimeseriesDocsDeleted = @@ -868,15 +925,14 @@ private Long deleteTimeseriesAspects( @Nullable Long endTimeMillis, @Nonnull List aspectsToDelete) { long totalNumberOfDocsDeleted = 0; - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urn.toString()))) { + + if (!isAPIAuthorizedUrns( + AuthenticationContext.getAuthentication(), + authorizer, + TIMESERIES, DELETE, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to delete entity " + urn); } // Construct the filter. List criteria = new ArrayList<>(); @@ -897,7 +953,7 @@ private Long deleteTimeseriesAspects( final String entityType = urn.getEntityType(); for (final String aspect : aspectsToDelete) { DeleteAspectValuesResult result = - _timeseriesAspectService.deleteAspectValues(entityType, aspect, filter); + timeseriesAspectService.deleteAspectValues(entityType, aspect, filter); totalNumberOfDocsDeleted += result.getNumDocsDeleted(); log.debug( @@ -921,18 +977,17 @@ public Task deleteReferencesTo( boolean dryRun = dry != null ? 
dry : false; Urn urn = Urn.createFromString(urnStr); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + DELETE, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity " + urnStr); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to delete entity " + urnStr); } return RestliUtil.toTask( - () -> _deleteEntityService.deleteReferencesTo(urn, dryRun), + () -> deleteEntityService.deleteReferencesTo(urn, dryRun), MetricRegistry.name(this.getClass(), "deleteReferences")); } @@ -944,20 +999,18 @@ public Task deleteReferencesTo( @WithSpan public Task setWriteable( @ActionParam(PARAM_VALUE) @Optional("true") @Nonnull Boolean value) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SET_WRITEABLE_PRIVILEGE), - (EntitySpec) null)) { + + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), + authorizer, + PoliciesConfig.SET_WRITEABLE_PRIVILEGE)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to enable and disable write mode."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to enable and disable write mode."); } log.info("setting entity resource to be writable"); return RestliUtil.toTask( () -> { - _entityService.setWritable(value); + entityService.setWritable(value); return null; }); } @@ -966,19 +1019,18 @@ public Task setWriteable( @Nonnull @WithSpan public Task getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String entityName) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!isAPIAuthorized( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + COUNTS, READ)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity counts."); } OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true); - return RestliUtil.toTask(() -> _entitySearchService.docCount(opContext, entityName)); + systemOperationContext, RequestContext.builder().buildRestli("getTotalEntityCount", entityName), authorizer, auth, true); + return RestliUtil.toTask(() -> entitySearchService.docCount(opContext, entityName)); } @Action(name = "batchGetTotalEntityCount") @@ -986,20 +1038,18 @@ public Task getTotalEntityCount(@ActionParam(PARAM_ENTITY) @Nonnull String @WithSpan public Task batchGetTotalEntityCount( @ActionParam(PARAM_ENTITIES) @Nonnull String[] entityNames) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!isAPIAuthorized( auth, - _authorizer, - 
ImmutableList.of(PoliciesConfig.GET_COUNTS_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + COUNTS, READ)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity counts."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity counts."); } OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true); + systemOperationContext, RequestContext.builder().buildRestli("batchGetTotalEntityCount", entityNames), authorizer, auth, true); return RestliUtil.toTask( - () -> new LongMap(_searchService.docCountPerEntity(opContext, Arrays.asList(entityNames)))); + () -> new LongMap(searchService.docCountPerEntity(opContext, Arrays.asList(entityNames)))); } @Action(name = ACTION_LIST_URNS) @@ -1010,18 +1060,27 @@ public Task listUrns( @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) throws URISyntaxException { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, entityName)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } log.info("LIST URNS for {} with start {} and count {}", entityName, start, count); - return RestliUtil.toTask(() -> _entityService.listUrns(entityName, start, count), "listUrns"); + return RestliUtil.toTask(() -> { + ListUrnsResult result = entityService.listUrns(entityName, start, count); + if (!isAPIAuthorizedEntityUrns( + auth, + authorizer, + READ, result.getEntities())) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity counts."); + } + return result; + }, "listUrns"); } @Action(name = ACTION_APPLY_RETENTION) @@ -1033,23 +1092,22 @@ public Task applyRetention( @ActionParam("attemptWithVersion") @Optional @Nullable Integer attemptWithVersion, @ActionParam(PARAM_ASPECT_NAME) @Optional @Nullable String aspectName, @ActionParam(PARAM_URN) @Optional @Nullable String urn) { - Authentication auth = AuthenticationContext.getAuthentication(); + EntitySpec resourceSpec = null; if (StringUtils.isNotBlank(urn)) { Urn resource = UrnUtils.getUrn(urn); resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString()); } - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.APPLY_RETENTION_PRIVILEGE), + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), + authorizer, + PoliciesConfig.APPLY_RETENTION_PRIVILEGE, resourceSpec)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to apply retention."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to apply retention."); } return RestliUtil.toTask( - () -> _entityService.batchApplyRetention(start, count, attemptWithVersion, aspectName, urn), + () -> entityService.batchApplyRetention(start, count, attemptWithVersion, aspectName, urn), ACTION_APPLY_RETENTION); } @@ -1063,24 +1121,30 @@ public Task filter( @ActionParam(PARAM_START) int start, @ActionParam(PARAM_COUNT) int count) { - Authentication auth = 
AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( + final Authentication auth = AuthenticationContext.getAuthentication(); + if (!AuthUtil.isAPIAuthorizedEntityType( auth, - _authorizer, - ImmutableList.of(PoliciesConfig.SEARCH_PRIVILEGE), - (EntitySpec) null)) { + authorizer, + READ, entityName)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to search."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to search."); } OperationContext opContext = OperationContext.asSession( - systemOperationContext, _authorizer, auth, true); + systemOperationContext, RequestContext.builder().buildRestli(ACTION_FILTER, entityName), authorizer, auth, true); log.info("FILTER RESULTS for {} with filter {}", entityName, filter); return RestliUtil.toTask( - () -> - validateSearchResult( - _entitySearchService.filter(opContext.withSearchFlags(flags -> flags.setFulltext(true)), entityName, filter, sortCriterion, start, count), - _entityService), + () -> { + SearchResult result = entitySearchService.filter(opContext.withSearchFlags(flags -> flags.setFulltext(true)), entityName, filter, sortCriterion, start, count); + if (!isAPIAuthorizedResult( + auth, + authorizer, + result)) { + throw new RestLiServiceException( + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity counts."); + } + return validateSearchResult( + result, + entityService);}, MetricRegistry.name(this.getClass(), "search")); } @@ -1090,18 +1154,17 @@ public Task filter( public Task exists(@ActionParam(PARAM_URN) @Nonnull String urnStr) throws URISyntaxException { Urn urn = UrnUtils.getUrn(urnStr); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, - _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), + authorizer, + EXISTS, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized get entity: " + urnStr); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized check entity existence: " + urnStr); } log.info("EXISTS for {}", urnStr); return RestliUtil.toTask( - () -> _entityService.exists(urn, true), MetricRegistry.name(this.getClass(), "exists")); + () -> entityService.exists(urn, true), MetricRegistry.name(this.getClass(), "exists")); } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java index 9653a20bd8785..7362ba4e5fc6a 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityV2Resource.java @@ -1,17 +1,16 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedEntityUrns; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static 
com.linkedin.metadata.authorization.ApiOperation.READ; import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; import static com.linkedin.metadata.utils.PegasusUtils.urnToEntityName; import com.codahale.metrics.MetricRegistry; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.entity.EntityResponse; import com.linkedin.metadata.authorization.PoliciesConfig; @@ -47,7 +46,7 @@ public class EntityV2Resource extends CollectionResourceTaskTemplate _entityService; @Inject @Named("authorizerChain") @@ -62,22 +61,21 @@ public Task get( throws URISyntaxException { log.debug("GET V2 {}", urnStr); final Urn urn = Urn.createFromString(urnStr); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - new EntitySpec(urn.getEntityType(), urnStr))) { + READ, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity " + urn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity " + urn); } return RestliUtil.toTask( () -> { final String entityName = urnToEntityName(urn); final Set projectedAspects = aspectNames == null - ? getAllAspectNames(_entityService, entityName) + ? _entityService.getEntityAspectNames(entityName) : new HashSet<>(Arrays.asList(aspectNames)); try { return _entityService.getEntityV2(entityName, urn, projectedAspects); @@ -103,19 +101,14 @@ public Task> batchGet( for (final String urnStr : urnStrs) { urns.add(Urn.createFromString(urnStr)); } - Authentication auth = AuthenticationContext.getAuthentication(); - List> resourceSpecs = - urns.stream() - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - resourceSpecs)) { + READ, + urns)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entities " + urnStrs); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entities " + urnStrs); } if (urns.size() <= 0) { return Task.value(Collections.emptyMap()); @@ -125,7 +118,7 @@ public Task> batchGet( () -> { final Set projectedAspects = aspectNames == null - ? getAllAspectNames(_entityService, entityName) + ? 
_entityService.getEntityAspectNames(entityName) : new HashSet<>(Arrays.asList(aspectNames)); try { return _entityService.getEntitiesV2(entityName, urns, projectedAspects); diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java index fc556d15342c2..96e2ab5abde95 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/EntityVersionedV2Resource.java @@ -1,16 +1,16 @@ package com.linkedin.metadata.resources.entity; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.entity.ResourceUtils.*; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedEntityUrns; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedUrns; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; import com.codahale.metrics.MetricRegistry; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; import com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -53,7 +53,7 @@ public class EntityVersionedV2Resource @Inject @Named("entityService") - private EntityService _entityService; + private EntityService _entityService; @Inject @Named("authorizerChain") @@ -66,20 +66,15 @@ public Task> batchGetVersioned( @Nonnull Set versionedUrnStrs, @QueryParam(PARAM_ENTITY_TYPE) @Nonnull String entityType, @QueryParam(PARAM_ASPECTS) @Optional @Nullable String[] aspectNames) { - Authentication auth = AuthenticationContext.getAuthentication(); - List> resourceSpecs = - versionedUrnStrs.stream() - .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())) - .map(urn -> java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .collect(Collectors.toList()); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - resourceSpecs)) { + READ, + versionedUrnStrs.stream() + .map(versionedUrn -> UrnUtils.getUrn(versionedUrn.getUrn())).collect(Collectors.toSet()))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entities " + versionedUrnStrs); } log.debug("BATCH GET VERSIONED V2 {}", versionedUrnStrs); @@ -90,7 +85,7 @@ public Task> batchGetVersioned( () -> { final Set projectedAspects = aspectNames == null - ? getAllAspectNames(_entityService, entityType) + ? 
_entityService.getEntityAspectNames(entityType) : new HashSet<>(Arrays.asList(aspectNames)); try { return _entityService.getEntitiesVersionedV2( diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java deleted file mode 100644 index 1b22cc135b037..0000000000000 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/entity/ResourceUtils.java +++ /dev/null @@ -1,13 +0,0 @@ -package com.linkedin.metadata.resources.entity; - -import com.linkedin.metadata.entity.EntityService; -import java.util.Set; - -public class ResourceUtils { - private ResourceUtils() {} - - public static Set getAllAspectNames( - final EntityService entityService, final String entityName) { - return entityService.getEntityAspectNames(entityName); - } -} diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java index 3fdd1d804a83f..d04efcaa85e49 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/lineage/Relationships.java @@ -1,20 +1,21 @@ package com.linkedin.metadata.resources.lineage; -import static com.linkedin.metadata.Constants.*; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedUrns; +import static com.linkedin.metadata.authorization.ApiGroup.LINEAGE; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; +import static com.linkedin.metadata.authorization.ApiOperation.READ; import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_COUNT; import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_DIRECTION; import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_START; import static com.linkedin.metadata.resources.restli.RestliConstants.PARAM_URN; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; import static com.linkedin.metadata.search.utils.QueryUtils.newFilter; import static com.linkedin.metadata.search.utils.QueryUtils.newRelationshipFilter; import com.codahale.metrics.MetricRegistry; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationshipArray; import com.linkedin.common.EntityRelationships; @@ -114,16 +115,14 @@ public Task get( @QueryParam("start") @Optional @Nullable Integer start, @QueryParam("count") @Optional @Nullable Integer count) { Urn urn = UrnUtils.getUrn(rawUrn); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList( - java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + LINEAGE, READ, + 
List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + rawUrn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity lineage: " + rawUrn); } RelationshipDirection direction = RelationshipDirection.valueOf(rawDirection); final List relationshipTypes = Arrays.asList(relationshipTypesParam); @@ -162,16 +161,14 @@ public Task get( @RestMethod.Delete public UpdateResponse delete(@QueryParam("urn") @Nonnull String rawUrn) throws Exception { Urn urn = Urn.createFromString(rawUrn); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE), - Collections.singletonList( - java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + LINEAGE, DELETE, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to delete entity: " + rawUrn); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to delete entity: " + rawUrn); } _graphService.removeNode(urn); return new UpdateResponse(HttpStatus.S_200_OK); @@ -189,16 +186,14 @@ public Task getLineage( throws URISyntaxException { log.info("GET LINEAGE {} {} {} {} {}", urnStr, direction, start, count, maxHops); final Urn urn = Urn.createFromString(urnStr); - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ENTITY_PRIVILEGE), - Collections.singletonList( - java.util.Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))))) { + LINEAGE, READ, + List.of(urn))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get entity lineage: " + urnStr); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get entity lineage: " + urnStr); } return RestliUtil.toTask( () -> diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java index 499fc0f5221fe..13d88f30dd032 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/OperationsResource.java @@ -1,16 +1,15 @@ package com.linkedin.metadata.resources.operations; -import static com.linkedin.metadata.Constants.*; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; import static com.linkedin.metadata.resources.restli.RestliConstants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; import com.codahale.metrics.MetricRegistry; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; +import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.ImmutableList; import com.linkedin.metadata.aspect.VersionedAspect; +import 
com.linkedin.metadata.authorization.Disjunctive; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.query.filter.Condition; @@ -122,15 +121,13 @@ public Task getTaskStatus( @ActionParam(PARAM_TASK) @Optional String task) { return RestliUtil.toTask( () -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE), - List.of(java.util.Optional.empty()))) { + PoliciesConfig.GET_ES_TASK_STATUS_PRIVILEGE)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get ES task status"); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get ES task status"); } boolean taskSpecified = task != null; boolean nodeAndTaskIdSpecified = nodeId != null && taskId > 0; @@ -183,15 +180,13 @@ public Task getTaskStatus( public Task getIndexSizes() { return RestliUtil.toTask( () -> { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE), - List.of(java.util.Optional.empty()))) { + PoliciesConfig.GET_TIMESERIES_INDEX_SIZES_PRIVILEGE)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to get index sizes."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to get index sizes."); } TimeseriesIndicesSizesResult result = new TimeseriesIndicesSizesResult(); result.setIndexSizes( @@ -211,15 +206,13 @@ String executeTruncateTimeseriesAspect( @Nullable Long timeoutSeconds, @Nullable Boolean forceDeleteByQuery, @Nullable Boolean forceReindex) { - Authentication authentication = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE), - List.of(java.util.Optional.empty()))) { + PoliciesConfig.TRUNCATE_TIMESERIES_INDEX_PRIVILEGE)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to truncate timeseries index"); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to truncate timeseries index"); } if (forceDeleteByQuery != null && forceDeleteByQuery.equals(forceReindex)) { diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java index 7c7c25ad3492c..5f999482cd859 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/operations/Utils.java @@ -1,15 +1,13 @@ package com.linkedin.metadata.resources.operations; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static 
com.datahub.authorization.AuthUtil.isAPIAuthorized; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; +import com.linkedin.metadata.authorization.Disjunctive; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.restoreindices.RestoreIndicesArgs; @@ -35,20 +33,19 @@ public static String restoreIndices( @Nullable Integer batchSize, @Nonnull Authorizer authorizer, @Nonnull EntityService entityService) { - Authentication authentication = AuthenticationContext.getAuthentication(); + EntitySpec resourceSpec = null; if (StringUtils.isNotBlank(urn)) { Urn resource = UrnUtils.getUrn(urn); resourceSpec = new EntitySpec(resource.getEntityType(), resource.toString()); } - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - authentication, + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), authorizer, - ImmutableList.of(PoliciesConfig.RESTORE_INDICES_PRIVILEGE), + PoliciesConfig.RESTORE_INDICES_PRIVILEGE, resourceSpec)) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to restore indices."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to restore indices."); } RestoreIndicesArgs args = new RestoreIndicesArgs() diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java index f4bc0dd72e4c6..5b2f19c661dab 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/platform/PlatformResource.java @@ -1,14 +1,11 @@ package com.linkedin.metadata.resources.platform; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.datahub.authorization.AuthUtil.isAPIAuthorized; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; -import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; import com.linkedin.entity.Entity; +import com.linkedin.metadata.authorization.Disjunctive; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.event.EventProducer; import com.linkedin.metadata.restli.RestliUtil; @@ -49,15 +46,13 @@ public Task producePlatformEvent( @ActionParam("name") @Nonnull String eventName, @ActionParam("key") @Optional String key, @ActionParam("event") @Nonnull PlatformEvent event) { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE), - (EntitySpec) null)) { + PoliciesConfig.PRODUCE_PLATFORM_EVENT_PRIVILEGE)) { throw new RestLiServiceException( - 
HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to produce platform events."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to produce platform events."); } log.info(String.format("Emitting platform event. name: %s, key: %s", eventName, key)); return RestliUtil.toTask( diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java index 278cd48bc455e..0ca8eb49308b3 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/restli/RestliUtils.java @@ -1,20 +1,10 @@ package com.linkedin.metadata.resources.restli; -import com.datahub.authentication.Authentication; -import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; -import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.parseq.Task; import com.linkedin.restli.common.HttpStatus; import com.linkedin.restli.server.RestLiServiceException; -import java.util.List; import java.util.Optional; import java.util.function.Supplier; -import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; @@ -83,37 +73,4 @@ public static RestLiServiceException badRequestException(@Nullable String messag public static RestLiServiceException invalidArgumentsException(@Nullable String message) { return new RestLiServiceException(HttpStatus.S_412_PRECONDITION_FAILED, message); } - - public static boolean isAuthorized( - @Nonnull Authentication authentication, - @Nonnull Authorizer authorizer, - @Nonnull final List privileges, - @Nonnull final List> resources) { - DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges); - return AuthUtil.isAuthorizedForResources( - authorizer, authentication.getActor().toUrnStr(), resources, orGroup); - } - - public static boolean isAuthorized( - @Nonnull Authentication authentication, - @Nonnull Authorizer authorizer, - @Nonnull final List privileges, - @Nullable final EntitySpec resource) { - DisjunctivePrivilegeGroup orGroup = convertPrivilegeGroup(privileges); - return AuthUtil.isAuthorized( - authorizer, - authentication.getActor().toUrnStr(), - java.util.Optional.ofNullable(resource), - orGroup); - } - - private static DisjunctivePrivilegeGroup convertPrivilegeGroup( - @Nonnull final List privileges) { - return new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - privileges.stream() - .map(PoliciesConfig.Privilege::getType) - .collect(Collectors.toList())))); - } } diff --git a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java index f80dfa5ce0f23..6f84b518aada5 100644 --- a/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java +++ b/metadata-service/restli-servlet-impl/src/main/java/com/linkedin/metadata/resources/usage/UsageStats.java @@ -1,10 +1,11 @@ package com.linkedin.metadata.resources.usage; +import static 
com.datahub.authorization.AuthUtil.isAPIAuthorized; +import static com.datahub.authorization.AuthUtil.isAPIAuthorizedEntityUrns; import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.resources.restli.RestliUtils.*; +import static com.linkedin.metadata.authorization.ApiOperation.UPDATE; import com.codahale.metrics.MetricRegistry; -import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationContext; import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; @@ -12,7 +13,6 @@ import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.collect.ImmutableList; import com.linkedin.common.WindowDuration; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -62,8 +62,10 @@ import java.net.URISyntaxException; import java.time.Instant; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Named; @@ -128,15 +130,14 @@ public Task batchIngest(@ActionParam(PARAM_BUCKETS) @Nonnull UsageAggregat log.info("Ingesting {} usage stats aggregations", buckets.length); return RestliUtil.toTask( () -> { - Authentication auth = AuthenticationContext.getAuthentication(); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + + if (!isAPIAuthorizedEntityUrns( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE), - (EntitySpec) null)) { + UPDATE, + Arrays.stream(buckets).sequential().map(UsageAggregation::getResource).collect(Collectors.toSet()))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to edit entities."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to edit entities."); } for (UsageAggregation agg : buckets) { this.ingest(agg); @@ -371,16 +372,15 @@ public Task query( log.info("Attempting to query usage stats"); return RestliUtil.toTask( () -> { - Authentication auth = AuthenticationContext.getAuthentication(); + Urn resourceUrn = UrnUtils.getUrn(resource); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE, new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to query usage."); } // 1. Populate the filter. This is common for all queries. 
Filter filter = new Filter(); @@ -444,16 +444,15 @@ public Task queryRange( @ActionParam(PARAM_RESOURCE) @Nonnull String resource, @ActionParam(PARAM_DURATION) @Nonnull WindowDuration duration, @ActionParam(PARAM_RANGE) UsageTimeRange range) { - Authentication auth = AuthenticationContext.getAuthentication(); + Urn resourceUrn = UrnUtils.getUrn(resource); - if (Boolean.parseBoolean(System.getenv(REST_API_AUTHORIZATION_ENABLED_ENV)) - && !isAuthorized( - auth, + if (!isAPIAuthorized( + AuthenticationContext.getAuthentication(), _authorizer, - ImmutableList.of(PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE, new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString()))) { throw new RestLiServiceException( - HttpStatus.S_401_UNAUTHORIZED, "User is unauthorized to query usage."); + HttpStatus.S_403_FORBIDDEN, "User is unauthorized to query usage."); } final long now = Instant.now().toEpochMilli(); return this.query(resource, duration, convertRangeToStartTime(range, now), now, null); diff --git a/metadata-service/services/build.gradle b/metadata-service/services/build.gradle index 78d651c05e4d9..9ec523bfd1e21 100644 --- a/metadata-service/services/build.gradle +++ b/metadata-service/services/build.gradle @@ -17,7 +17,11 @@ dependencies { api project(path: ':metadata-models', configuration: 'dataTemplate') api project(':metadata-models') - implementation project(':metadata-service:restli-client') + // Consider using the restli-client-api instead of dependency on project(':metadata-service:restli-client') + // to avoid circular dependencies with the restli/pegasus code gen + implementation project(':metadata-service:restli-client-api') + implementation project(':metadata-auth:auth-api') + implementation project(':metadata-operation-context') implementation project(':metadata-service:configuration') implementation externalDependency.slf4jApi diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java index 1b4b65baeecd6..9c44aefbed19d 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/EntityService.java @@ -21,6 +21,7 @@ import com.linkedin.mxe.MetadataChangeLog; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.util.Pair; import java.net.URISyntaxException; import java.util.Collection; @@ -246,6 +247,13 @@ String batchApplyRetention( RestoreIndicesResult restoreIndices( @Nonnull RestoreIndicesArgs args, @Nonnull Consumer logger); + // Restore indices from list using key lookups (no scans) + List restoreIndices( + @Nonnull Set urns, + @Nullable Set inputAspectNames, + @Nullable Integer inputBatchSize) + throws RemoteInvocationException, URISyntaxException; + ListUrnsResult listUrns(@Nonnull final String entityName, final int start, final int count); @Deprecated diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java index a270cf4548bed..cd12a8c23a4d7 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java +++ 
b/metadata-service/services/src/main/java/com/linkedin/metadata/entity/restoreindices/RestoreIndicesResult.java @@ -13,6 +13,7 @@ public class RestoreIndicesResult { public long aspectCheckMs = 0; public long createRecordMs = 0; public long sendMessageMs = 0; + public long defaultAspectsCreated = 0; public String lastUrn = ""; public String lastAspect = ""; } diff --git a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java index 666fe23a93187..af904b6b9926d 100644 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java +++ b/metadata-service/services/src/main/java/com/linkedin/metadata/service/RollbackService.java @@ -1,15 +1,12 @@ package com.linkedin.metadata.service; import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; +import static com.linkedin.metadata.authorization.ApiOperation.DELETE; import com.datahub.authentication.Authentication; import com.datahub.authentication.AuthenticationException; import com.datahub.authorization.AuthUtil; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; -import com.datahub.authorization.EntitySpec; import com.datahub.plugins.auth.authorization.Authorizer; -import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -17,7 +14,6 @@ import com.linkedin.events.metadata.ChangeType; import com.linkedin.execution.ExecutionRequestResult; import com.linkedin.metadata.Constants; -import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.RollbackRunResult; import com.linkedin.metadata.key.ExecutionRequestKey; @@ -34,7 +30,6 @@ import com.linkedin.timeseries.DeleteAspectValuesResult; import java.util.List; import java.util.Map; -import java.util.Optional; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; import javax.annotation.Nonnull; @@ -57,7 +52,6 @@ public class RollbackService { private final EntityService entityService; private final SystemMetadataService systemMetadataService; private final TimeseriesAspectService timeseriesAspectService; - private final boolean restApiAuthorizationEnabled; public List rollbackTargetAspects(@Nonnull String runId, boolean hardDelete) { return systemMetadataService.findByRunId(runId, hardDelete, 0, MAX_RESULT_SIZE); @@ -291,23 +285,15 @@ private boolean isAuthorized( final Authorizer authorizer, @Nonnull List rowSummaries, @Nonnull Authentication authentication) { - DisjunctivePrivilegeGroup orGroup = - new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup( - ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); - List> resourceSpecs = + return AuthUtil.isAPIAuthorizedEntityUrns( + authentication, + authorizer, + DELETE, rowSummaries.stream() .map(AspectRowSummary::getUrn) .map(UrnUtils::getUrn) - .map(urn -> Optional.of(new EntitySpec(urn.getEntityType(), urn.toString()))) - .distinct() - .collect(Collectors.toList()); - - return !restApiAuthorizationEnabled - || AuthUtil.isAuthorizedForResources( - authorizer, authentication.getActor().toUrnStr(), resourceSpecs, orGroup); + .collect(Collectors.toSet())); } private static String stringifyRowCount(int size) { diff --git 
a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java b/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java deleted file mode 100644 index b6bef33df1d7f..0000000000000 --- a/metadata-service/services/src/main/java/com/linkedin/metadata/shared/ValidationUtils.java +++ /dev/null @@ -1,245 +0,0 @@ -package com.linkedin.metadata.shared; - -import com.codahale.metrics.Timer; -import com.linkedin.common.UrnArray; -import com.linkedin.common.urn.Urn; -import com.linkedin.data.template.AbstractArrayTemplate; -import com.linkedin.metadata.browse.BrowseResult; -import com.linkedin.metadata.browse.BrowseResultEntity; -import com.linkedin.metadata.browse.BrowseResultEntityArray; -import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.graph.EntityLineageResult; -import com.linkedin.metadata.graph.LineageRelationship; -import com.linkedin.metadata.graph.LineageRelationshipArray; -import com.linkedin.metadata.query.ListResult; -import com.linkedin.metadata.search.LineageScrollResult; -import com.linkedin.metadata.search.LineageSearchEntity; -import com.linkedin.metadata.search.LineageSearchEntityArray; -import com.linkedin.metadata.search.LineageSearchResult; -import com.linkedin.metadata.search.ScrollResult; -import com.linkedin.metadata.search.SearchEntity; -import com.linkedin.metadata.search.SearchEntityArray; -import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.utils.metrics.MetricUtils; -import java.util.Objects; -import java.util.Set; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.Stream; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import lombok.extern.slf4j.Slf4j; - -@Slf4j -public class ValidationUtils { - - public static SearchResult validateSearchResult( - final SearchResult searchResult, @Nonnull final EntityService entityService) { - try (Timer.Context ignored = - MetricUtils.timer(ValidationUtils.class, "validateSearchResult").time()) { - if (searchResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - SearchResult validatedSearchResult = - new SearchResult() - .setFrom(searchResult.getFrom()) - .setMetadata(searchResult.getMetadata()) - .setPageSize(searchResult.getPageSize()) - .setNumEntities(searchResult.getNumEntities()); - - SearchEntityArray validatedEntities = - validatedUrns(searchResult.getEntities(), SearchEntity::getEntity, entityService, true) - .collect(Collectors.toCollection(SearchEntityArray::new)); - validatedSearchResult.setEntities(validatedEntities); - - return validatedSearchResult; - } - } - - public static ScrollResult validateScrollResult( - final ScrollResult scrollResult, @Nonnull final EntityService entityService) { - if (scrollResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - ScrollResult validatedScrollResult = - new ScrollResult() - .setMetadata(scrollResult.getMetadata()) - .setPageSize(scrollResult.getPageSize()) - .setNumEntities(scrollResult.getNumEntities()); - if (scrollResult.getScrollId() != null) { - validatedScrollResult.setScrollId(scrollResult.getScrollId()); - } - - SearchEntityArray validatedEntities = - validatedUrns(scrollResult.getEntities(), SearchEntity::getEntity, entityService, true) - .collect(Collectors.toCollection(SearchEntityArray::new)); - - validatedScrollResult.setEntities(validatedEntities); - - return 
validatedScrollResult; - } - - public static BrowseResult validateBrowseResult( - final BrowseResult browseResult, @Nonnull final EntityService entityService) { - try (Timer.Context ignored = - MetricUtils.timer(ValidationUtils.class, "validateBrowseResult").time()) { - if (browseResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - BrowseResult validatedBrowseResult = - new BrowseResult() - .setGroups(browseResult.getGroups()) - .setMetadata(browseResult.getMetadata()) - .setFrom(browseResult.getFrom()) - .setPageSize(browseResult.getPageSize()) - .setNumGroups(browseResult.getNumGroups()) - .setNumEntities(browseResult.getNumEntities()) - .setNumElements(browseResult.getNumElements()); - - BrowseResultEntityArray validatedEntities = - validatedUrns(browseResult.getEntities(), BrowseResultEntity::getUrn, entityService, true) - .collect(Collectors.toCollection(BrowseResultEntityArray::new)); - validatedBrowseResult.setEntities(validatedEntities); - - return validatedBrowseResult; - } - } - - public static ListResult validateListResult( - final ListResult listResult, @Nonnull final EntityService entityService) { - try (Timer.Context ignored = - MetricUtils.timer(ValidationUtils.class, "validateListResult").time()) { - if (listResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - ListResult validatedListResult = - new ListResult() - .setStart(listResult.getStart()) - .setCount(listResult.getCount()) - .setTotal(listResult.getTotal()); - - UrnArray validatedEntities = - validatedUrns(listResult.getEntities(), Function.identity(), entityService, true) - .collect(Collectors.toCollection(UrnArray::new)); - validatedListResult.setEntities(validatedEntities); - - return validatedListResult; - } - } - - public static LineageSearchResult validateLineageSearchResult( - final LineageSearchResult lineageSearchResult, - @Nonnull final EntityService entityService) { - try (Timer.Context ignored = - MetricUtils.timer(ValidationUtils.class, "validateLineageResult").time()) { - if (lineageSearchResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - LineageSearchResult validatedLineageSearchResult = - new LineageSearchResult() - .setMetadata(lineageSearchResult.getMetadata()) - .setFrom(lineageSearchResult.getFrom()) - .setPageSize(lineageSearchResult.getPageSize()) - .setNumEntities(lineageSearchResult.getNumEntities()); - - LineageSearchEntityArray validatedEntities = - validatedUrns( - lineageSearchResult.getEntities(), - LineageSearchEntity::getEntity, - entityService, - true) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); - validatedLineageSearchResult.setEntities(validatedEntities); - - log.debug("Returning validated lineage search results"); - return validatedLineageSearchResult; - } - } - - public static EntityLineageResult validateEntityLineageResult( - @Nullable final EntityLineageResult entityLineageResult, - @Nonnull final EntityService entityService) { - if (entityLineageResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - final EntityLineageResult validatedEntityLineageResult = - new EntityLineageResult() - .setStart(entityLineageResult.getStart()) - .setCount(entityLineageResult.getCount()) - .setTotal(entityLineageResult.getTotal()); - - LineageRelationshipArray validatedRelationships = - validatedUrns( - 
entityLineageResult.getRelationships(), - LineageRelationship::getEntity, - entityService, - false) - .collect(Collectors.toCollection(LineageRelationshipArray::new)); - - validatedEntityLineageResult.setFiltered( - (entityLineageResult.hasFiltered() && entityLineageResult.getFiltered() != null - ? entityLineageResult.getFiltered() - : 0) - + entityLineageResult.getRelationships().size() - - validatedRelationships.size()); - validatedEntityLineageResult.setRelationships(validatedRelationships); - - return validatedEntityLineageResult; - } - - public static LineageScrollResult validateLineageScrollResult( - final LineageScrollResult lineageScrollResult, - @Nonnull final EntityService entityService) { - if (lineageScrollResult == null) { - return null; - } - Objects.requireNonNull(entityService, "entityService must not be null"); - - LineageScrollResult validatedLineageScrollResult = - new LineageScrollResult() - .setMetadata(lineageScrollResult.getMetadata()) - .setPageSize(lineageScrollResult.getPageSize()) - .setNumEntities(lineageScrollResult.getNumEntities()); - if (lineageScrollResult.getScrollId() != null) { - validatedLineageScrollResult.setScrollId(lineageScrollResult.getScrollId()); - } - - LineageSearchEntityArray validatedEntities = - validatedUrns( - lineageScrollResult.getEntities(), - LineageSearchEntity::getEntity, - entityService, - true) - .collect(Collectors.toCollection(LineageSearchEntityArray::new)); - - validatedLineageScrollResult.setEntities(validatedEntities); - - return validatedLineageScrollResult; - } - - private static Stream validatedUrns( - final AbstractArrayTemplate array, - Function urnFunction, - @Nonnull final EntityService entityService, - boolean includeSoftDeleted) { - - Set existingUrns = - entityService.exists( - array.stream().map(urnFunction).collect(Collectors.toList()), includeSoftDeleted); - return array.stream().filter(item -> existingUrns.contains(urnFunction.apply(item))); - } - - private ValidationUtils() {} -} diff --git a/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java b/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java index 239e56cfd97a8..7fbb4794cd5f5 100644 --- a/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java +++ b/metadata-service/services/src/test/java/com/linkedin/metadata/service/RestrictedServiceTest.java @@ -1,8 +1,11 @@ package com.linkedin.metadata.service; +import static org.mockito.Mockito.mock; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; -import com.linkedin.metadata.secret.SecretService; +import io.datahubproject.metadata.services.RestrictedService; +import io.datahubproject.metadata.services.SecretService; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.Test; @@ -17,7 +20,7 @@ public class RestrictedServiceTest { @Test private void testEncryptRestrictedUrn() throws Exception { - SecretService mockSecretService = Mockito.mock(SecretService.class); + SecretService mockSecretService = mock(SecretService.class); Mockito.when(mockSecretService.encrypt(TEST_DATASET_URN.toString())) .thenReturn(ENCRYPED_DATASET_URN); final RestrictedService service = new RestrictedService(mockSecretService); @@ -27,7 +30,7 @@ private void testEncryptRestrictedUrn() throws Exception { @Test private void testDecryptRestrictedUrn() throws Exception { - SecretService mockSecretService = Mockito.mock(SecretService.class); + SecretService 
mockSecretService = mock(SecretService.class); Mockito.when(mockSecretService.decrypt(ENCRYPED_DATASET_URN)) .thenReturn(TEST_DATASET_URN.toString()); final RestrictedService service = new RestrictedService(mockSecretService); diff --git a/metadata-service/war/src/main/resources/boot/policies.json b/metadata-service/war/src/main/resources/boot/policies.json index b41f8aee267d3..eb1f0a9b47e35 100644 --- a/metadata-service/war/src/main/resources/boot/policies.json +++ b/metadata-service/war/src/main/resources/boot/policies.json @@ -55,7 +55,6 @@ "privileges":[ "EDIT_ENTITY", "VIEW_ENTITY_PAGE", - "VIEW_ENTITY", "EDIT_LINEAGE", "EDIT_ENTITY_ASSERTIONS", "SEARCH_PRIVILEGE", @@ -107,7 +106,6 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", - "VIEW_ENTITY", "SEARCH_PRIVILEGE", "GET_COUNTS_PRIVILEGE", "GET_TIMESERIES_ASPECT_PRIVILEGE", @@ -201,7 +199,6 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", - "VIEW_ENTITY", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", "EDIT_ENTITY_OWNERS", @@ -286,7 +283,6 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", - "VIEW_ENTITY", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", "EDIT_ENTITY_DOCS", @@ -406,7 +402,6 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", - "VIEW_ENTITY", "VIEW_DATASET_USAGE", "VIEW_DATASET_PROFILE", "SEARCH_PRIVILEGE", @@ -435,7 +430,6 @@ }, "privileges":[ "VIEW_ENTITY_PAGE", - "VIEW_ENTITY", "EDIT_ENTITY_TAGS", "EDIT_ENTITY_GLOSSARY_TERMS", "EDIT_ENTITY_OWNERS", diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiGroup.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiGroup.java new file mode 100644 index 0000000000000..e732cf74fa0c6 --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiGroup.java @@ -0,0 +1,10 @@ +package com.linkedin.metadata.authorization; + +public enum ApiGroup { + ENTITY, + LINEAGE, + ANALYTICS, + TIMESERIES, + COUNTS, + RELATIONSHIP +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiOperation.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiOperation.java new file mode 100644 index 0000000000000..a7c8906a99554 --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/ApiOperation.java @@ -0,0 +1,14 @@ +package com.linkedin.metadata.authorization; + +public enum ApiOperation { + CREATE, + READ, + UPDATE, + DELETE, + EXISTS, + /** + * Manage is a composite of all privileges which can be reduced to UPDATE (CREATE, READ, EXISTS) + * and DELETE in the case where there is not an explicit MANAGE privilege + */ + MANAGE +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/Conjunctive.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/Conjunctive.java new file mode 100644 index 0000000000000..2e4684ac61020 --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/Conjunctive.java @@ -0,0 +1,20 @@ +package com.linkedin.metadata.authorization; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.stream.Collectors; + +public class Conjunctive extends ArrayList { + public static Conjunctive of(E... 
elements) { + return new Conjunctive<>(Arrays.stream(elements).distinct().collect(Collectors.toList())); + } + + public Conjunctive() { + super(); + } + + public Conjunctive(Collection c) { + super(c); + } +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/Disjunctive.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/Disjunctive.java new file mode 100644 index 0000000000000..416590ecf9f55 --- /dev/null +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/Disjunctive.java @@ -0,0 +1,58 @@ +package com.linkedin.metadata.authorization; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedList; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +public class Disjunctive extends ArrayList { + public static final Disjunctive> DENY_ACCESS = + new Disjunctive<>(); + + public static Disjunctive> disjoint(E... elements) { + Disjunctive> result = new Disjunctive<>(); + result.addAll( + Arrays.stream(elements).map(Conjunctive::of).distinct().collect(Collectors.toList())); + return result; + } + + public static Disjunctive of(E... elements) { + return new Disjunctive<>(Arrays.stream(elements).distinct().collect(Collectors.toList())); + } + + public Disjunctive() { + super(); + } + + public Disjunctive(Collection c) { + super(c); + } + + /** + * For each distjoint conjunctive within a and b produce a combined disjunctive + * + *
a = [[priv1.1 && priv1.2] || [priv2.1 && priv2.2]] b = [[priv3.1 && priv3.2] || [priv4.1 && + * priv4.2]] + + *
disjunctive of [ [priv1.1 && priv1.2 && priv3.1 && priv3.2] || [priv2.1 && priv2.2 && + * priv4.1 && priv4.2] || [priv2.1 && priv2.2 && priv3.1 && priv3.2] || [priv2.1 && priv2.2 && + * priv4.1 && priv4.2] ] + */ + public static Disjunctive> conjoin( + Disjunctive> a, Disjunctive> b) { + List> conjunctives = new LinkedList<>(); + for (Conjunctive conjunctA : a) { + for (Conjunctive conjunctB : b) { + conjunctives.add( + new Conjunctive( + Stream.concat(conjunctA.stream(), conjunctB.stream()) + .distinct() + .collect(Collectors.toList()))); + } + } + return new Disjunctive<>(conjunctives.stream().distinct().collect(Collectors.toList())); + } +} diff --git a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java index 2898e17193815..60b2c611440b2 100644 --- a/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java +++ b/metadata-utils/src/main/java/com/linkedin/metadata/authorization/PoliciesConfig.java @@ -1,11 +1,28 @@ package com.linkedin.metadata.authorization; +import static com.linkedin.metadata.authorization.ApiGroup.ENTITY; +import static com.linkedin.metadata.authorization.ApiOperation.READ; +import static com.linkedin.metadata.authorization.Disjunctive.DENY_ACCESS; + import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.linkedin.common.UrnArray; +import com.linkedin.common.urn.Urn; +import com.linkedin.data.template.StringArray; +import com.linkedin.metadata.Constants; +import com.linkedin.policy.DataHubActorFilter; +import com.linkedin.policy.DataHubPolicyInfo; +import com.linkedin.policy.DataHubResourceFilter; +import com.linkedin.policy.PolicyMatchCondition; +import com.linkedin.policy.PolicyMatchCriterion; +import com.linkedin.policy.PolicyMatchCriterionArray; +import com.linkedin.policy.PolicyMatchFilter; import java.util.Collection; import java.util.List; -import java.util.Set; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; +import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.Data; import lombok.Getter; @@ -20,32 +37,32 @@ public class PoliciesConfig { // Platform Privileges // - public static final Privilege MANAGE_POLICIES_PRIVILEGE = + static final Privilege MANAGE_POLICIES_PRIVILEGE = Privilege.of( "MANAGE_POLICIES", "Manage Policies", "Create and remove access control policies. 
Be careful - Actors with this privilege are effectively super users."); - public static final Privilege MANAGE_INGESTION_PRIVILEGE = + static final Privilege MANAGE_INGESTION_PRIVILEGE = Privilege.of( "MANAGE_INGESTION", "Manage Metadata Ingestion", "Create, remove, and update Metadata Ingestion sources."); - public static final Privilege MANAGE_SECRETS_PRIVILEGE = + static final Privilege MANAGE_SECRETS_PRIVILEGE = Privilege.of( "MANAGE_SECRETS", "Manage Secrets", "Create & remove Secrets stored inside DataHub."); - public static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = + static final Privilege MANAGE_USERS_AND_GROUPS_PRIVILEGE = Privilege.of( "MANAGE_USERS_AND_GROUPS", "Manage Users & Groups", "Create, remove, and update users and groups on DataHub."); - public static final Privilege VIEW_ANALYTICS_PRIVILEGE = + private static final Privilege VIEW_ANALYTICS_PRIVILEGE = Privilege.of("VIEW_ANALYTICS", "View Analytics", "View the DataHub analytics dashboard."); - public static final Privilege GET_ANALYTICS_PRIVILEGE = + private static final Privilege GET_ANALYTICS_PRIVILEGE = Privilege.of( "GET_ANALYTICS_PRIVILEGE", "Analytics API access", @@ -137,21 +154,16 @@ public class PoliciesConfig { // Resource Privileges // - public static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = + static final Privilege VIEW_ENTITY_PAGE_PRIVILEGE = Privilege.of("VIEW_ENTITY_PAGE", "View Entity Page", "The ability to view the entity page."); - public static final Privilege VIEW_ENTITY_PRIVILEGE = + static final Privilege EXISTS_ENTITY_PRIVILEGE = Privilege.of( - "VIEW_ENTITY", "View Entity", "The ability to view the entity in search results."); + "EXISTS_ENTITY", "Entity Exists", "The ability to determine whether the entity exists."); - /* - These two privileges are logically the same for search for now. - In the future, we might allow search but not the entity page view. - */ - public static final Set VIEW_ENTITY_PRIVILEGES = - Set.of( - PoliciesConfig.VIEW_ENTITY_PRIVILEGE.getType(), - PoliciesConfig.VIEW_ENTITY_PAGE_PRIVILEGE.getType()); + static final Privilege CREATE_ENTITY_PRIVILEGE = + Privilege.of( + "CREATE_ENTITY", "Create Entity", "The ability to create an entity if it doesn't exist."); public static final Privilege EDIT_ENTITY_TAGS_PRIVILEGE = Privilege.of( @@ -227,10 +239,10 @@ public class PoliciesConfig { "Edit Entity", "The ability to edit any information about an entity. 
Super user privileges for the entity."); - public static final Privilege DELETE_ENTITY_PRIVILEGE = + static final Privilege DELETE_ENTITY_PRIVILEGE = Privilege.of("DELETE_ENTITY", "Delete", "The ability to delete the delete this entity."); - public static final Privilege EDIT_LINEAGE_PRIVILEGE = + static final Privilege EDIT_LINEAGE_PRIVILEGE = Privilege.of( "EDIT_LINEAGE", "Edit Lineage", @@ -268,9 +280,10 @@ public class PoliciesConfig { EDIT_ENTITY_DEPRECATION_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, - VIEW_ENTITY_PRIVILEGE, EDIT_ENTITY_PROPERTIES_PRIVILEGE, - EDIT_ENTITY_INCIDENTS_PRIVILEGE); + EDIT_ENTITY_INCIDENTS_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE); // Dataset Privileges public static final Privilege EDIT_DATASET_COL_TAGS_PRIVILEGE = @@ -354,19 +367,19 @@ public class PoliciesConfig { Privilege.of( "GET_TIMELINE_PRIVILEGE", "Get Timeline API", "The ability to use the GET Timeline API."); - public static final Privilege GET_ENTITY_PRIVILEGE = + static final Privilege GET_ENTITY_PRIVILEGE = Privilege.of( "GET_ENTITY_PRIVILEGE", "Get Entity + Relationships API", "The ability to use the GET Entity and Relationships API."); - public static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE = + static final Privilege GET_TIMESERIES_ASPECT_PRIVILEGE = Privilege.of( "GET_TIMESERIES_ASPECT_PRIVILEGE", "Get Timeseries Aspect API", "The ability to use the GET Timeseries Aspect API."); - public static final Privilege GET_COUNTS_PRIVILEGE = + static final Privilege GET_COUNTS_PRIVILEGE = Privilege.of( "GET_COUNTS_PRIVILEGE", "Get Aspect/Entity Count APIs", @@ -402,7 +415,9 @@ public class PoliciesConfig { "Explain ElasticSearch Query API", "The ability to use the Operations API explain endpoint."); - public static final Privilege SEARCH_PRIVILEGE = + /* Prefer per entity READ */ + @Deprecated + static final Privilege SEARCH_PRIVILEGE = Privilege.of("SEARCH_PRIVILEGE", "Search API", "The ability to access search APIs."); public static final Privilege SET_WRITEABLE_PRIVILEGE = @@ -502,7 +517,9 @@ public class PoliciesConfig { EDIT_TAG_COLOR_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, - DELETE_ENTITY_PRIVILEGE)); + DELETE_ENTITY_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // Container Privileges public static final ResourcePrivileges CONTAINER_PRIVILEGES = @@ -530,7 +547,9 @@ public class PoliciesConfig { EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE, MANAGE_DATA_PRODUCTS_PRIVILEGE, - EDIT_ENTITY_PROPERTIES_PRIVILEGE)); + EDIT_ENTITY_PROPERTIES_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // Data Product Privileges public static final ResourcePrivileges DATA_PRODUCT_PRIVILEGES = @@ -548,7 +567,9 @@ public class PoliciesConfig { EDIT_ENTITY_TAGS_PRIVILEGE, EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE, EDIT_ENTITY_DOMAINS_PRIVILEGE, - EDIT_ENTITY_PROPERTIES_PRIVILEGE)); + EDIT_ENTITY_PROPERTIES_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // Glossary Term Privileges public static final ResourcePrivileges GLOSSARY_TERM_PRIVILEGES = @@ -563,7 +584,9 @@ public class PoliciesConfig { EDIT_ENTITY_DOC_LINKS_PRIVILEGE, EDIT_ENTITY_DEPRECATION_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, - EDIT_ENTITY_PROPERTIES_PRIVILEGE)); + EDIT_ENTITY_PROPERTIES_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // Glossary Node Privileges public static final ResourcePrivileges GLOSSARY_NODE_PRIVILEGES = @@ -580,7 +603,9 @@ public class PoliciesConfig { EDIT_ENTITY_PRIVILEGE, 
MANAGE_GLOSSARY_CHILDREN_PRIVILEGE, MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE, - EDIT_ENTITY_PROPERTIES_PRIVILEGE)); + EDIT_ENTITY_PROPERTIES_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // Group Privileges public static final ResourcePrivileges CORP_GROUP_PRIVILEGES = @@ -595,7 +620,9 @@ public class PoliciesConfig { EDIT_CONTACT_INFO_PRIVILEGE, EDIT_ENTITY_DOCS_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, - EDIT_ENTITY_PROPERTIES_PRIVILEGE)); + EDIT_ENTITY_PROPERTIES_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // User Privileges public static final ResourcePrivileges CORP_USER_PRIVILEGES = @@ -608,7 +635,9 @@ public class PoliciesConfig { EDIT_CONTACT_INFO_PRIVILEGE, EDIT_USER_PROFILE_PRIVILEGE, EDIT_ENTITY_PRIVILEGE, - EDIT_ENTITY_PROPERTIES_PRIVILEGE)); + EDIT_ENTITY_PROPERTIES_PRIVILEGE, + CREATE_ENTITY_PRIVILEGE, + EXISTS_ENTITY_PRIVILEGE)); // ERModelRelationship Privileges public static final ResourcePrivileges ER_MODEL_RELATIONSHIP_PRIVILEGES = @@ -652,17 +681,291 @@ public class PoliciesConfig { .add(ALL_RESOURCE_PRIVILEGES) .build(); + /* + * This is an attempt to piece together and organize CRUD-like semantics from the various existing + * privileges. These are intended to govern lower level APIs to which CRUD semantics apply. + * + * These are collections of disjoint privileges, meaning a single privilege will grant access to + * the operation. + */ + public static Map>>> + API_PRIVILEGE_MAP = + ImmutableMap.>>>builder() + .put( + ApiGroup.ENTITY, + ImmutableMap.>>builder() + .put( + ApiOperation.CREATE, + Disjunctive.disjoint(CREATE_ENTITY_PRIVILEGE, EDIT_ENTITY_PRIVILEGE)) + .put( + ApiOperation.READ, + Disjunctive.disjoint( + VIEW_ENTITY_PAGE_PRIVILEGE, + GET_ENTITY_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE)) + .put(ApiOperation.UPDATE, Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE)) + .put(ApiOperation.DELETE, Disjunctive.disjoint(DELETE_ENTITY_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + Disjunctive.disjoint( + EXISTS_ENTITY_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + VIEW_ENTITY_PAGE_PRIVILEGE, + SEARCH_PRIVILEGE)) + .build()) + .put( + ApiGroup.LINEAGE, + ImmutableMap.>>builder() + .put( + ApiOperation.CREATE, + Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE, EDIT_LINEAGE_PRIVILEGE)) + .put( + ApiOperation.READ, + Disjunctive.disjoint( + VIEW_ENTITY_PAGE_PRIVILEGE, + GET_ENTITY_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + EDIT_LINEAGE_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE)) + .put( + ApiOperation.UPDATE, + Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE, EDIT_LINEAGE_PRIVILEGE)) + .put( + ApiOperation.DELETE, + Disjunctive.disjoint( + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + EDIT_LINEAGE_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + Disjunctive.disjoint( + EXISTS_ENTITY_PRIVILEGE, + VIEW_ENTITY_PAGE_PRIVILEGE, + SEARCH_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + EDIT_LINEAGE_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + GET_ENTITY_PRIVILEGE)) + .build()) + .put( + ApiGroup.RELATIONSHIP, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE)) + .put( + ApiOperation.READ, + Disjunctive.disjoint( + VIEW_ENTITY_PAGE_PRIVILEGE, + GET_ENTITY_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE)) + .put(ApiOperation.UPDATE, Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE)) + .put( + ApiOperation.DELETE, + Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + Disjunctive.disjoint( + EXISTS_ENTITY_PRIVILEGE, + VIEW_ENTITY_PAGE_PRIVILEGE, + SEARCH_PRIVILEGE, + 
EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + GET_ENTITY_PRIVILEGE)) + .build()) + .put( + ApiGroup.ANALYTICS, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, DENY_ACCESS) + .put( + ApiOperation.READ, + Disjunctive.disjoint(VIEW_ANALYTICS_PRIVILEGE, GET_ANALYTICS_PRIVILEGE)) + .put(ApiOperation.UPDATE, DENY_ACCESS) + .put(ApiOperation.DELETE, DENY_ACCESS) + .put( + ApiOperation.EXISTS, + Disjunctive.disjoint( + VIEW_ANALYTICS_PRIVILEGE, GET_ANALYTICS_PRIVILEGE, SEARCH_PRIVILEGE)) + .build()) + .put( + ApiGroup.TIMESERIES, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE)) + .put( + ApiOperation.READ, + Disjunctive.disjoint( + GET_TIMESERIES_ASPECT_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE)) + .put(ApiOperation.UPDATE, Disjunctive.disjoint(EDIT_ENTITY_PRIVILEGE)) + .put(ApiOperation.DELETE, Disjunctive.disjoint(DELETE_ENTITY_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + Disjunctive.disjoint( + EXISTS_ENTITY_PRIVILEGE, + DELETE_ENTITY_PRIVILEGE, + SEARCH_PRIVILEGE, + EDIT_ENTITY_PRIVILEGE, + GET_TIMESERIES_ASPECT_PRIVILEGE, + VIEW_ENTITY_PAGE_PRIVILEGE)) + .build()) + .put( + ApiGroup.COUNTS, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, DENY_ACCESS) + .put(ApiOperation.READ, Disjunctive.disjoint(GET_COUNTS_PRIVILEGE)) + .put(ApiOperation.UPDATE, DENY_ACCESS) + .put(ApiOperation.DELETE, DENY_ACCESS) + .put(ApiOperation.EXISTS, DENY_ACCESS) + .build()) + .build(); + + /** Contains entity specific privileges, default to map above for non-specific entities */ + public static final Map>>> + API_ENTITY_PRIVILEGE_MAP = + ImmutableMap.>>>builder() + .put( + Constants.POLICY_ENTITY_NAME, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, Disjunctive.disjoint(MANAGE_POLICIES_PRIVILEGE)) + .put( + ApiOperation.READ, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.READ)) + .put(ApiOperation.UPDATE, Disjunctive.disjoint(MANAGE_POLICIES_PRIVILEGE)) + .put(ApiOperation.DELETE, Disjunctive.disjoint(MANAGE_POLICIES_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.EXISTS)) + .build()) + .put( + Constants.CORP_USER_ENTITY_NAME, + ImmutableMap.>>builder() + .put( + ApiOperation.CREATE, + Disjunctive.disjoint(MANAGE_USERS_AND_GROUPS_PRIVILEGE)) + .put( + ApiOperation.READ, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.READ)) + .put( + ApiOperation.UPDATE, + Disjunctive.disjoint(MANAGE_USERS_AND_GROUPS_PRIVILEGE)) + .put( + ApiOperation.DELETE, + Disjunctive.disjoint(MANAGE_USERS_AND_GROUPS_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.EXISTS)) + .build()) + .put( + Constants.CORP_GROUP_ENTITY_NAME, + ImmutableMap.>>builder() + .put( + ApiOperation.CREATE, + Disjunctive.disjoint(MANAGE_USERS_AND_GROUPS_PRIVILEGE)) + .put( + ApiOperation.READ, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.READ)) + .put( + ApiOperation.UPDATE, + Disjunctive.disjoint(MANAGE_USERS_AND_GROUPS_PRIVILEGE)) + .put( + ApiOperation.DELETE, + Disjunctive.disjoint(MANAGE_USERS_AND_GROUPS_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.EXISTS)) + .build()) + .put( + Constants.INGESTION_SOURCE_ENTITY_NAME, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, Disjunctive.disjoint(MANAGE_INGESTION_PRIVILEGE)) + .put( + ApiOperation.READ, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.READ)) + .put(ApiOperation.UPDATE, 
Disjunctive.disjoint(MANAGE_INGESTION_PRIVILEGE)) + .put(ApiOperation.DELETE, Disjunctive.disjoint(MANAGE_INGESTION_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.EXISTS)) + .build()) + .put( + Constants.SECRETS_ENTITY_NAME, + ImmutableMap.>>builder() + .put(ApiOperation.CREATE, Disjunctive.disjoint(MANAGE_SECRETS_PRIVILEGE)) + .put(ApiOperation.READ, Disjunctive.disjoint(MANAGE_SECRETS_PRIVILEGE)) + .put(ApiOperation.UPDATE, Disjunctive.disjoint(MANAGE_SECRETS_PRIVILEGE)) + .put(ApiOperation.DELETE, Disjunctive.disjoint(MANAGE_SECRETS_PRIVILEGE)) + .put( + ApiOperation.EXISTS, + API_PRIVILEGE_MAP.get(ApiGroup.ENTITY).get(ApiOperation.EXISTS)) + .build()) + .build(); + + /** + * Define default policies for given actors. Typically used to define allow privileges on self in + * a common way across APIs + * + * @param actorUrn + * @return + */ + public static List getDefaultPolicies(Urn actorUrn) { + return ImmutableList.builder() + .add( + new DataHubPolicyInfo() + .setDisplayName("View Self") + .setDescription("View self entity page.") + .setActors(new DataHubActorFilter().setUsers(new UrnArray(actorUrn))) + .setPrivileges( + PoliciesConfig.API_PRIVILEGE_MAP.get(ENTITY).get(READ).stream() + .flatMap(Collection::stream) + .map(PoliciesConfig.Privilege::getType) + .collect(Collectors.toCollection(StringArray::new))) + .setType(PoliciesConfig.METADATA_POLICY_TYPE) + .setState(PoliciesConfig.ACTIVE_POLICY_STATE) + .setEditable(false) + .setResources( + new DataHubResourceFilter() + .setFilter( + new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + List.of( + new PolicyMatchCriterion() + .setField("URN") + .setCondition(PolicyMatchCondition.EQUALS) + .setValues( + new StringArray( + List.of(actorUrn.toString()))))))))) + .build(); + } + @Data @Getter @AllArgsConstructor public static class Privilege { - private String type; + @Nonnull private String type; private String displayName; private String description; static Privilege of(String type, String displayName, String description) { return new Privilege(type, displayName, description); } + + // Not using displayName or description in equality by design + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + Privilege privilege = (Privilege) o; + + return type.equals(privilege.type); + } + + @Override + public int hashCode() { + return type.hashCode(); + } } @Data diff --git a/metadata-utils/src/test/java/com/linkedin/metadata/authorization/DisjunctiveTest.java b/metadata-utils/src/test/java/com/linkedin/metadata/authorization/DisjunctiveTest.java new file mode 100644 index 0000000000000..a43a97bb541b5 --- /dev/null +++ b/metadata-utils/src/test/java/com/linkedin/metadata/authorization/DisjunctiveTest.java @@ -0,0 +1,39 @@ +package com.linkedin.metadata.authorization; + +import static com.linkedin.metadata.authorization.PoliciesConfig.DELETE_ENTITY_PRIVILEGE; +import static com.linkedin.metadata.authorization.PoliciesConfig.EDIT_ENTITY_PRIVILEGE; +import static com.linkedin.metadata.authorization.PoliciesConfig.EDIT_LINEAGE_PRIVILEGE; +import static com.linkedin.metadata.authorization.PoliciesConfig.VIEW_ENTITY_PAGE_PRIVILEGE; +import static org.testng.Assert.assertEquals; + +import java.util.List; +import org.testng.annotations.Test; + +public class DisjunctiveTest { + + @Test + public void testDisjointHelper() { + assertEquals( + Disjunctive.disjoint(VIEW_ENTITY_PAGE_PRIVILEGE, 
VIEW_ENTITY_PAGE_PRIVILEGE), + new Disjunctive<>(List.of(new Conjunctive<>(List.of(VIEW_ENTITY_PAGE_PRIVILEGE))))); + } + + @Test + public void testDisjointConjoin() { + Disjunctive> a = + new Disjunctive<>( + List.of(Conjunctive.of(EDIT_ENTITY_PRIVILEGE), Conjunctive.of(EDIT_LINEAGE_PRIVILEGE))); + Disjunctive> b = + new Disjunctive<>( + List.of( + Conjunctive.of(DELETE_ENTITY_PRIVILEGE), Conjunctive.of(EDIT_LINEAGE_PRIVILEGE))); + + assertEquals( + Disjunctive.conjoin(a, b), + Disjunctive.of( + Conjunctive.of(EDIT_ENTITY_PRIVILEGE, DELETE_ENTITY_PRIVILEGE), + Conjunctive.of(EDIT_ENTITY_PRIVILEGE, EDIT_LINEAGE_PRIVILEGE), + Conjunctive.of(EDIT_LINEAGE_PRIVILEGE, DELETE_ENTITY_PRIVILEGE), + Conjunctive.of(EDIT_LINEAGE_PRIVILEGE))); + } +} diff --git a/settings.gradle b/settings.gradle index 27928ae7446a1..ade0c818b130d 100644 --- a/settings.gradle +++ b/settings.gradle @@ -9,6 +9,7 @@ include 'metadata-service:auth-impl' include 'metadata-service:auth-filter' include 'metadata-service:auth-servlet-impl' include 'metadata-service:restli-api' +include 'metadata-service:restli-client-api' include 'metadata-service:restli-client' include 'metadata-service:restli-servlet-impl' include 'metadata-service:graphql-servlet-impl' diff --git a/smoke-test/build.gradle b/smoke-test/build.gradle index 2cedb3c7570b8..1f676edeb7d49 100644 --- a/smoke-test/build.gradle +++ b/smoke-test/build.gradle @@ -82,6 +82,7 @@ task lintFix(type: Exec, dependsOn: installDev) { task quickstartNoCypressSuite0(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' + environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'TEST_STRATEGY', 'no_cypress_suite0' workingDir = project.projectDir @@ -93,6 +94,7 @@ task quickstartNoCypressSuite0(type: Exec, dependsOn: [installDev, ':metadata-in task quickstartNoCypressSuite1(type: Exec, dependsOn: [installDev, ':metadata-ingestion:installDev']) { environment 'RUN_QUICKSTART', 'false' environment 'DATAHUB_KAFKA_SCHEMA_REGISTRY_URL', 'http://localhost:8080/schema-registry/api/' + environment 'KAFKA_BROKER_CONTAINER', 'datahub-kafka-broker-1' environment 'TEST_STRATEGY', 'no_cypress_suite1' workingDir = project.projectDir diff --git a/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py b/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py index d5e1ade663dff..6be23b2567f67 100644 --- a/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py +++ b/smoke-test/tests/cli/user_groups_cmd/test_group_cmd.py @@ -106,11 +106,7 @@ def test_group_upsert(wait_for_healthchecks: Any) -> None: "owners": [ {"owner": "urn:li:corpuser:user1", "type": "TECHNICAL_OWNER"} ], - "ownerTypes": { - "urn:li:ownershipType:__system__none": [ - "urn:li:corpuser:user1", - ], - }, + "ownerTypes": {}, }, "status": {"removed": False}, } From 088e7a87d823a4776a80ebc5498a8edd47cc294f Mon Sep 17 00:00:00 2001 From: david-leifker <114954101+david-leifker@users.noreply.github.com> Date: Sat, 23 Mar 2024 12:13:26 -0500 Subject: [PATCH 15/18] fix(searchContext): fix search flag immutability (#10117) --- .github/actions/ci-optimization/action.yml | 1 + .../metadata/context/SearchContext.java | 12 ++++++++---- .../metadata/context/SearchContextTest.java | 13 +++++++++++++ 3 files changed, 22 insertions(+), 4 deletions(-) diff --git a/.github/actions/ci-optimization/action.yml b/.github/actions/ci-optimization/action.yml index cad3a03dcb464..c36b739701217 
100644 --- a/.github/actions/ci-optimization/action.yml +++ b/.github/actions/ci-optimization/action.yml @@ -70,6 +70,7 @@ runs: - "metadata-jobs/**" - "metadata-service/**" - "metadata-utils/**" + - "metadata-operation-context/**" - "datahub-graphql-core/**" - "smoke-test/**" - "docker/**" diff --git a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java index d4e3712309d6c..ac9c903fb432d 100644 --- a/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java +++ b/metadata-operation-context/src/main/java/io/datahubproject/metadata/context/SearchContext.java @@ -21,10 +21,14 @@ public class SearchContext implements ContextInterface { public static SearchContext withFlagDefaults( @Nonnull SearchContext searchContext, @Nonnull Function flagDefaults) { - return searchContext.toBuilder() - // update search flags - .searchFlags(flagDefaults.apply(searchContext.getSearchFlags())) - .build(); + try { + return searchContext.toBuilder() + // update search flags + .searchFlags(flagDefaults.apply(searchContext.getSearchFlags().copy())) + .build(); + } catch (CloneNotSupportedException e) { + throw new RuntimeException(e); + } } @Nonnull private final IndexConvention indexConvention; diff --git a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java index 26365c283fc57..4858bb342258a 100644 --- a/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java +++ b/metadata-operation-context/src/test/java/io/datahubproject/metadata/context/SearchContextTest.java @@ -76,4 +76,17 @@ public void searchContextId() { .getCacheKeyComponent(), "Expected differences in search flags to result in different caches"); } + + @Test + public void testImmutableSearchFlags() { + SearchContext initial = + SearchContext.builder().indexConvention(IndexConventionImpl.NO_PREFIX).build(); + assertEquals(initial.getSearchFlags(), new SearchFlags().setSkipCache(false)); + + SearchContext mutated = initial.withFlagDefaults(flags -> flags.setSkipCache(true)); + assertEquals(mutated.getSearchFlags(), new SearchFlags().setSkipCache(true)); + + // ensure original is not changed + assertEquals(initial.getSearchFlags(), new SearchFlags().setSkipCache(false)); + } } From 9de15a273a52a127e3f9494fa90c59641e72ffba Mon Sep 17 00:00:00 2001 From: k7ragav <67264597+k7ragav@users.noreply.github.com> Date: Sun, 24 Mar 2024 08:01:28 +0100 Subject: [PATCH 16/18] fix(ingest/looker): use `external_base_url` for explore url generation (#10093) --- .../src/datahub/ingestion/source/looker/looker_source.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py index e42e7b82bf8ab..c63a845633825 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py +++ b/metadata-ingestion/src/datahub/ingestion/source/looker/looker_source.py @@ -753,7 +753,7 @@ def fetch_one_explore( looker_explore._to_metadata_events( self.source_config, self.reporter, - self.source_config.base_url, + self.source_config.external_base_url or self.source_config.base_url, self.source_config.extract_embed_urls, ) or events From 
7e5610f358a5f7ffcef23c90fdd1740c1381d1bd Mon Sep 17 00:00:00 2001 From: Tamas Nemeth Date: Mon, 25 Mar 2024 13:28:35 +0100 Subject: [PATCH 17/18] feat(ingest/dagster): Dagster source (#10071) Co-authored-by: shubhamjagtap639 --- .github/workflows/build-and-test.yml | 2 + .github/workflows/dagster-plugin.yml | 85 +++ .github/workflows/test-results.yml | 2 +- docs-website/build.gradle | 10 +- docs-website/generateDocsDir.ts | 1 + docs-website/sidebars.js | 6 + docs/lineage/dagster.md | 89 ++++ .../dagster-plugin/.gitignore | 143 +++++ .../dagster-plugin/README.md | 4 + .../dagster-plugin/build.gradle | 131 +++++ .../examples/advanced_ops_jobs.py | 106 ++++ .../dagster-plugin/examples/assets_job.py | 63 +++ .../dagster-plugin/examples/basic_setup.py | 20 + .../dagster-plugin/examples/ops_job.py | 51 ++ .../dagster-plugin/pyproject.toml | 19 + .../dagster-plugin/scripts/release.sh | 26 + .../dagster-plugin/setup.cfg | 73 +++ .../dagster-plugin/setup.py | 136 +++++ .../src/datahub_dagster_plugin/__init__.py | 21 + .../datahub_dagster_plugin/client/__init__.py | 0 .../client/dagster_generator.py | 504 ++++++++++++++++++ .../datahub_dagster_plugin.py | 2 + .../sensors/__init__.py | 0 .../sensors/datahub_sensors.py | 439 +++++++++++++++ .../integration/integration_test_dummy.py | 2 + .../dagster-plugin/tests/unit/test_dagster.py | 303 +++++++++++ .../dagster-plugin/tests/unit/test_dummy.py | 2 + metadata-ingestion/developing.md | 9 + .../source/data_lake_common/path_spec.py | 1 + .../local/golden_mces_single_file.json | 162 ++++-- settings.gradle | 1 + 31 files changed, 2357 insertions(+), 56 deletions(-) create mode 100644 .github/workflows/dagster-plugin.yml create mode 100644 docs/lineage/dagster.md create mode 100644 metadata-ingestion-modules/dagster-plugin/.gitignore create mode 100644 metadata-ingestion-modules/dagster-plugin/README.md create mode 100644 metadata-ingestion-modules/dagster-plugin/build.gradle create mode 100644 metadata-ingestion-modules/dagster-plugin/examples/advanced_ops_jobs.py create mode 100644 metadata-ingestion-modules/dagster-plugin/examples/assets_job.py create mode 100644 metadata-ingestion-modules/dagster-plugin/examples/basic_setup.py create mode 100644 metadata-ingestion-modules/dagster-plugin/examples/ops_job.py create mode 100644 metadata-ingestion-modules/dagster-plugin/pyproject.toml create mode 100755 metadata-ingestion-modules/dagster-plugin/scripts/release.sh create mode 100644 metadata-ingestion-modules/dagster-plugin/setup.cfg create mode 100644 metadata-ingestion-modules/dagster-plugin/setup.py create mode 100644 metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/__init__.py create mode 100644 metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/__init__.py create mode 100644 metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/dagster_generator.py create mode 100644 metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/datahub_dagster_plugin.py create mode 100644 metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/__init__.py create mode 100644 metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/datahub_sensors.py create mode 100644 metadata-ingestion-modules/dagster-plugin/tests/integration/integration_test_dummy.py create mode 100644 metadata-ingestion-modules/dagster-plugin/tests/unit/test_dagster.py create mode 100644 metadata-ingestion-modules/dagster-plugin/tests/unit/test_dummy.py diff --git 
a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index df223e3603e1e..837838352c8fd 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -84,6 +84,8 @@ jobs: -x :metadata-io:test \ -x :metadata-ingestion-modules:airflow-plugin:build \ -x :metadata-ingestion-modules:airflow-plugin:check \ + -x :metadata-ingestion-modules:dagster-plugin:build \ + -x :metadata-ingestion-modules:dagster-plugin:check \ -x :datahub-frontend:build \ -x :datahub-web-react:build \ --parallel diff --git a/.github/workflows/dagster-plugin.yml b/.github/workflows/dagster-plugin.yml new file mode 100644 index 0000000000000..48f1b24196c9e --- /dev/null +++ b/.github/workflows/dagster-plugin.yml @@ -0,0 +1,85 @@ +name: Dagster Plugin +on: + push: + branches: + - master + paths: + - ".github/workflows/dagster-plugin.yml" + - "metadata-ingestion-modules/dagster-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + pull_request: + branches: + - master + paths: + - ".github/**" + - "metadata-ingestion-modules/dagster-plugin/**" + - "metadata-ingestion/**" + - "metadata-models/**" + release: + types: [published] + +concurrency: + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + cancel-in-progress: true + +jobs: + dagster-plugin: + runs-on: ubuntu-latest + env: + SPARK_VERSION: 3.0.3 + DATAHUB_TELEMETRY_ENABLED: false + strategy: + matrix: + python-version: ["3.8", "3.10"] + include: + - python-version: "3.8" + extraPythonRequirement: "dagster>=1.3.3" + - python-version: "3.10" + extraPythonRequirement: "dagster>=1.3.3" + fail-fast: false + steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + cache: "pip" + - name: Install dependencies + run: ./metadata-ingestion/scripts/install_deps.sh + - name: Install dagster package and test (extras ${{ matrix.extraPythonRequirement }}) + run: ./gradlew -Pextra_pip_requirements='${{ matrix.extraPythonRequirement }}' :metadata-ingestion-modules:dagster-plugin:lint :metadata-ingestion-modules:dagster-plugin:testQuick + - name: pip freeze show list installed + if: always() + run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && pip freeze + - uses: actions/upload-artifact@v3 + if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }} + with: + name: Test Results (dagster Plugin ${{ matrix.python-version}}) + path: | + **/build/reports/tests/test/** + **/build/test-results/test/** + **/junit.*.xml + - name: Upload coverage to Codecov + if: always() + uses: codecov/codecov-action@v3 + with: + token: ${{ secrets.CODECOV_TOKEN }} + directory: . 
+ fail_ci_if_error: false + flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }} + name: pytest-dagster + verbose: true + + event-file: + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/test-results.yml b/.github/workflows/test-results.yml index 0153060692271..c94a5fc340f47 100644 --- a/.github/workflows/test-results.yml +++ b/.github/workflows/test-results.yml @@ -2,7 +2,7 @@ name: Test Results on: workflow_run: - workflows: ["build & test", "metadata ingestion", "Airflow Plugin"] + workflows: ["build & test", "metadata ingestion", "Airflow Plugin", "Dagster Plugin"] types: - completed diff --git a/docs-website/build.gradle b/docs-website/build.gradle index 702ec0429780f..d1fdc6dc83365 100644 --- a/docs-website/build.gradle +++ b/docs-website/build.gradle @@ -79,11 +79,19 @@ task yarnInstall(type: YarnTask) { ) outputs.dir('node_modules') } +task airflowPluginBuild(dependsOn: [':metadata-ingestion-modules:airflow-plugin:buildWheel']) { +} + +// The Dagster plugin build and airflow plugin build can't be built at the same time; otherwise, it will raise +// fatal: Unable to create '/home/runner/work/datahub/datahub/.git/index.lock': File exists.. on CI +task dagsterPluginBuild(dependsOn: [':metadata-ingestion-modules:dagster-plugin:buildWheel', airflowPluginBuild]) { +} task yarnGenerate(type: YarnTask, dependsOn: [yarnInstall, generateGraphQLSchema, generateJsonSchema, ':metadata-ingestion:modelDocGen', ':metadata-ingestion:docGen', - ':metadata-ingestion:buildWheel', ':metadata-ingestion-modules:airflow-plugin:buildWheel'] ) { + ':metadata-ingestion:buildWheel', + airflowPluginBuild, dagsterPluginBuild] ) { inputs.files(projectMdFiles) outputs.cacheIf { true } args = ['run', 'generate'] diff --git a/docs-website/generateDocsDir.ts b/docs-website/generateDocsDir.ts index e19f09530665a..9116218290d32 100644 --- a/docs-website/generateDocsDir.ts +++ b/docs-website/generateDocsDir.ts @@ -572,6 +572,7 @@ function copy_python_wheels(): void { const wheel_dirs = [ "../metadata-ingestion/dist", "../metadata-ingestion-modules/airflow-plugin/dist", + "../metadata-ingestion-modules/dagster-plugin/dist", ]; const wheel_output_directory = path.join(STATIC_DIRECTORY, "wheels"); diff --git a/docs-website/sidebars.js b/docs-website/sidebars.js index f07f1aa031bc7..34398bc8c6661 100644 --- a/docs-website/sidebars.js +++ b/docs-website/sidebars.js @@ -310,6 +310,11 @@ module.exports = { id: "docs/lineage/airflow", label: "Airflow", }, + { + type: "doc", + id: "docs/lineage/dagster", + label: "Dagster", + }, //"docker/airflow/local_airflow", "metadata-integration/java/spark-lineage/README", "metadata-ingestion/integration_docs/great-expectations", @@ -766,6 +771,7 @@ module.exports = { // "metadata-integration/java/spark-lineage-beta/README.md // "metadata-integration/java/openlineage-converter/README" //"metadata-ingestion-modules/airflow-plugin/README" + //"metadata-ingestion-modules/dagster-plugin/README" // "metadata-ingestion/schedule_docs/datahub", // we can delete this // TODO: change the titles of these, removing the "What is..." 
portion from the sidebar" // "docs/what/entity", diff --git a/docs/lineage/dagster.md b/docs/lineage/dagster.md new file mode 100644 index 0000000000000..785aeaa4c03b2 --- /dev/null +++ b/docs/lineage/dagster.md @@ -0,0 +1,89 @@ +# Dagster Integration +DataHub supports the integration of + +- Dagster Pipeline metadata +- Job and Op run information as well as +- Lineage information when present + +## Using Datahub's Dagster Sensor + +Dagster sensors allow us to perform some actions based on some state change. Datahub's defined dagster sensor will emit metadata after every dagster pipeline run execution. This sensor is able to emit both pipeline success as well as failures. For more details about Dagster sensors please refer [Sensors](https://docs.dagster.io/concepts/partitions-schedules-sensors/sensors). + +### Prerequisites + +1. You need to create a new dagster project. See . +2. There are two ways to define Dagster definition before starting dagster UI. One using [Definitions](https://docs.dagster.io/_apidocs/definitions#dagster.Definitions) class (recommended) and second using [Repositories](https://docs.dagster.io/concepts/repositories-workspaces/repositories#repositories). +3. Creation of new dagster project by default uses Definition class to define Dagster definition. + +### Setup + +1. You need to install the required dependency. + +```shell +pip install acryl_datahub_dagster_plugin +``` + +2. You need to import DataHub dagster plugin provided sensor definition and add it in Dagster definition or dagster repository before starting dagster UI as show below: +**Using Definitions class:** + +```python +{{ inline /metadata-ingestion-modules/dagster-plugin/examples/basic_setup.py }} +``` + +3. The DataHub dagster plugin provided sensor internally uses below configs. You can set these configs using environment variables. If not set, the sensor will take the default value. + + **Configuration options:** + + | Configuration Option | Default value | Description | + |-------------------------------|---------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| + | datahub_client_config | | The DataHub client config | + | dagster_url | | The url to your Dagster Webserver. | + | capture_asset_materialization | True | Whether to capture asset keys as Dataset on AssetMaterialization event | + | capture_input_output | True | Whether to capture and try to parse input and output from HANDLED_OUTPUT,.LOADED_INPUT events. (currently only [PathMetadataValue](https://github.com/dagster-io/dagster/blob/7e08c05dcecef9fd07f887c7846bd1c9a90e7d84/python_modules/dagster/dagster/_core/definitions/metadata/__init__.py#L655) metadata supported (EXPERIMENTAL) | + | platform_instance | | The instance of the platform that all assets produced by this recipe belong to. It is optional | + | asset_lineage_extractor | | You can implement your own logic to capture asset lineage information. See example for details[] | + +4. Once Dagster UI is up, you need to turn on the provided sensor execution. To turn on the sensor, click on Overview tab and then on Sensors tab. You will see a toggle button in front of all defined sensors to turn it on/off. + +5. 
DataHub dagster plugin provided sensor is ready to emit metadata after every dagster pipeline run execution. + +### How to validate installation + +1. Go and check in Dagster UI at Overview -> Sensors menu if you can see the 'datahub_sensor'. +2. Run a Dagster Job. In the dagster daemon logs, you should see DataHub related log messages like: + +``` +datahub_sensor - Emitting metadata... +``` + +## Dagster Ins and Out + +We can provide inputs and outputs to both assets and ops explicitly using a dictionary of `Ins` and `Out` corresponding to the decorated function arguments. While providing inputs and outputs explicitly we can provide metadata as well. +To create dataset upstream and downstream dependency for the assets and ops you can use an ins and out dictionary with metadata provided. For reference, look at the sample jobs created using assets [`assets_job.py`](../../metadata-ingestion-modules/dagster-plugin/examples/assets_job.py), or ops [`ops_job.py`](../../metadata-ingestion-modules/dagster-plugin/examples/ops_job.py). + +## Define your custom logic to capture asset lineage information +You can define your own logic to capture asset lineage information. + +The output Tuple contains two dictionaries, one for input assets and the other for output assets. The key of the dictionary is the op key and the value is the set of asset urns that are upstream or downstream of the op. + +```python +from datahub_dagster_plugin.client.dagster_generator import DagsterGenerator, DatasetLineage + +def asset_lineage_extractor( + context: RunStatusSensorContext, + dagster_generator: DagsterGenerator, + graph: DataHubGraph, +) -> Dict[str, DatasetLineage]: + dataset_lineage: Dict[str, DatasetLineage] = {} + + # Extracting input and output assets from the context + return dataset_lineage +``` + +[See example job here](https://github.com/datahub-project/datahub/blob/master/metadata-ingestion-modules/dagster-plugin/examples/advanced_ops_jobs.py). + +## Debugging + +### Connection error for Datahub Rest URL + +If you get ConnectionError: HTTPConnectionPool(host='localhost', port=8080), then in that case your DataHub GMS service is not up. diff --git a/metadata-ingestion-modules/dagster-plugin/.gitignore b/metadata-ingestion-modules/dagster-plugin/.gitignore new file mode 100644 index 0000000000000..4ff42af3e16cf --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/.gitignore @@ -0,0 +1,143 @@ +.envrc +src/datahub_dagster_plugin/__init__.py.bak +.vscode/ +output +pvenv36/ +bq_credentials.json +/tmp +*.bak + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Generated classes +src/datahub/metadata/ +wheels/ +junit.quick.xml diff --git a/metadata-ingestion-modules/dagster-plugin/README.md b/metadata-ingestion-modules/dagster-plugin/README.md new file mode 100644 index 0000000000000..8e1460957ed9f --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/README.md @@ -0,0 +1,4 @@ +# Datahub Dagster Plugin + +See the DataHub Dagster docs for details. + diff --git a/metadata-ingestion-modules/dagster-plugin/build.gradle b/metadata-ingestion-modules/dagster-plugin/build.gradle new file mode 100644 index 0000000000000..163e0e6738b4d --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/build.gradle @@ -0,0 +1,131 @@ +plugins { + id 'base' +} + +ext { + python_executable = 'python3' + venv_name = 'venv' +} + +if (!project.hasProperty("extra_pip_requirements")) { + ext.extra_pip_requirements = "" +} + +def pip_install_command = "VIRTUAL_ENV=${venv_name} ${venv_name}/bin/uv pip install -e ../../metadata-ingestion" + +task checkPythonVersion(type: Exec) { + commandLine python_executable, '-c', 'import sys; assert sys.version_info >= (3, 8)' +} + +task environmentSetup(type: Exec, dependsOn: checkPythonVersion) { + def sentinel_file = "${venv_name}/.venv_environment_sentinel" + inputs.file file('setup.py') + outputs.file(sentinel_file) + commandLine 'bash', '-c', + "${python_executable} -m venv ${venv_name} && " + + "${venv_name}/bin/python -m pip install --upgrade pip uv wheel 'setuptools>=63.0.0' && " + + "touch ${sentinel_file}" +} + +task installPackage(type: Exec, dependsOn: [environmentSetup, ':metadata-ingestion:codegen']) { + def sentinel_file = "${venv_name}/.build_install_package_sentinel" + inputs.file file('setup.py') + outputs.file(sentinel_file) + commandLine 'bash', '-c', + "source ${venv_name}/bin/activate && set -x && " + + "uv pip install -e . 
${extra_pip_requirements} && " + + "touch ${sentinel_file}" +} + +task install(dependsOn: [installPackage]) + +task installDev(type: Exec, dependsOn: [install]) { + def sentinel_file = "${venv_name}/.build_install_dev_sentinel" + inputs.file file('setup.py') + outputs.file(sentinel_file) + commandLine 'bash', '-c', + "source ${venv_name}/bin/activate && set -x && " + + "uv pip install -e .[dev] ${extra_pip_requirements} && " + + "touch ${sentinel_file}" +} + +task lint(type: Exec, dependsOn: installDev) { + /* + The find/sed combo below is a temporary work-around for the following mypy issue with airflow 2.2.0: + "venv/lib/python3.8/site-packages/airflow/_vendor/connexion/spec.py:169: error: invalid syntax". + */ + commandLine 'bash', '-c', + "source ${venv_name}/bin/activate && set -x && " + + "black --check --diff src/ tests/ examples/ && " + + "isort --check --diff src/ tests/ examples/ && " + + "flake8 --count --statistics src/ tests/ examples/ && " + + "mypy --show-traceback --show-error-codes src/ tests/ examples/" +} +task lintFix(type: Exec, dependsOn: installDev) { + commandLine 'bash', '-x', '-c', + "source ${venv_name}/bin/activate && " + + "black src/ tests/ examples/ && " + + "isort src/ tests/ examples/ && " + + "flake8 src/ tests/ examples/ && " + + "mypy src/ tests/ examples/" +} + +task installDevTest(type: Exec, dependsOn: [installDev]) { + def sentinel_file = "${venv_name}/.build_install_dev_test_sentinel" + inputs.file file('setup.py') + outputs.dir("${venv_name}") + outputs.file(sentinel_file) + commandLine 'bash', '-c', + "source ${venv_name}/bin/activate && set -x && " + + "uv pip install -e .[dev,integration-tests] ${extra_pip_requirements} && " + + "touch ${sentinel_file}" +} + +def testFile = hasProperty('testFile') ? testFile : 'unknown' +task testSingle(dependsOn: [installDevTest]) { + doLast { + if (testFile != 'unknown') { + exec { + commandLine 'bash', '-x', '-c', + "source ${venv_name}/bin/activate && pytest ${testFile}" + } + } else { + throw new GradleException("No file provided. Use -PtestFile=") + } + } +} + +task testQuick(type: Exec, dependsOn: installDevTest) { + // We can't enforce the coverage requirements if we run a subset of the tests. 
+ inputs.files(project.fileTree(dir: "src/", include: "**/*.py")) + inputs.files(project.fileTree(dir: "tests/")) + outputs.dir("${venv_name}") + commandLine 'bash', '-x', '-c', + "source ${venv_name}/bin/activate && pytest -vv --continue-on-collection-errors --junit-xml=junit.quick.xml" +} + + +task testFull(type: Exec, dependsOn: [testQuick, installDevTest]) { + commandLine 'bash', '-x', '-c', + "source ${venv_name}/bin/activate && pytest -m 'not slow_integration' -vv --continue-on-collection-errors --junit-xml=junit.full.xml" +} +task buildWheel(type: Exec, dependsOn: [install]) { + commandLine 'bash', '-c', "source ${venv_name}/bin/activate && " + + 'uv pip install build && RELEASE_VERSION="\${RELEASE_VERSION:-0.0.0.dev1}" RELEASE_SKIP_TEST=1 RELEASE_SKIP_UPLOAD=1 ./scripts/release.sh' +} + +task cleanPythonCache(type: Exec) { + commandLine 'bash', '-c', + "find src -type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete -o -type d -empty -delete" +} + +build.dependsOn install +check.dependsOn lint +check.dependsOn testQuick + +clean { + delete venv_name + delete 'build' + delete 'dist' +} +clean.dependsOn cleanPythonCache diff --git a/metadata-ingestion-modules/dagster-plugin/examples/advanced_ops_jobs.py b/metadata-ingestion-modules/dagster-plugin/examples/advanced_ops_jobs.py new file mode 100644 index 0000000000000..d4cc65297e42c --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/examples/advanced_ops_jobs.py @@ -0,0 +1,106 @@ +from typing import Dict + +from dagster import ( + Definitions, + In, + Out, + PythonObjectDagsterType, + RunStatusSensorContext, + job, + op, +) +from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph +from datahub.utilities.urns.dataset_urn import DatasetUrn + +from datahub_dagster_plugin.client.dagster_generator import ( + DagsterGenerator, + DatasetLineage, +) +from datahub_dagster_plugin.sensors.datahub_sensors import ( + DatahubDagsterSourceConfig, + make_datahub_sensor, +) + + +@op +def extract(): + results = [1, 2, 3, 4] + return results + + +@op( + ins={ + "data": In( + dagster_type=PythonObjectDagsterType(list), + metadata={"datahub.inputs": [DatasetUrn("snowflake", "tableA").urn]}, + ) + }, + out={ + "result": Out( + metadata={"datahub.outputs": [DatasetUrn("snowflake", "tableB").urn]} + ) + }, +) +def transform(data): + results = [] + for each in data: + results.append(str(each)) + return results + + +@job +def do_stuff(): + transform(extract()) + + +def asset_lineage_extractor( + context: RunStatusSensorContext, + dagster_generator: DagsterGenerator, + graph: DataHubGraph, +) -> Dict[str, DatasetLineage]: + from dagster._core.events import DagsterEventType + + logs = context.instance.all_logs( + context.dagster_run.run_id, + { + DagsterEventType.ASSET_MATERIALIZATION, + DagsterEventType.ASSET_OBSERVATION, + DagsterEventType.HANDLED_OUTPUT, + DagsterEventType.LOADED_INPUT, + }, + ) + + dataset_lineage: Dict[str, DatasetLineage] = {} + + for log in logs: + if not log.dagster_event or not log.step_key: + continue + + if log.dagster_event.event_type == DagsterEventType.ASSET_MATERIALIZATION: + if log.step_key not in dataset_lineage: + dataset_lineage[log.step_key] = DatasetLineage(set(), set()) + + materialization = log.asset_materialization + if not materialization: + continue + + properties = { + key: str(value) for (key, value) in materialization.metadata.items() + } + asset_key = materialization.asset_key.path + dataset_urn = dagster_generator.emit_asset( + graph, asset_key, 
materialization.description, properties + ) + dataset_lineage[log.step_key].outputs.add(dataset_urn) + + return dataset_lineage + + +config = DatahubDagsterSourceConfig( + datahub_client_config=DatahubClientConfig(server="http://localhost:8080"), + dagster_url="http://localhost:3000", + asset_lineage_extractor=asset_lineage_extractor, +) + +datahub_sensor = make_datahub_sensor(config=config) +defs = Definitions(jobs=[do_stuff], sensors=[datahub_sensor]) diff --git a/metadata-ingestion-modules/dagster-plugin/examples/assets_job.py b/metadata-ingestion-modules/dagster-plugin/examples/assets_job.py new file mode 100644 index 0000000000000..57634ab345a5e --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/examples/assets_job.py @@ -0,0 +1,63 @@ +from dagster import ( + AssetIn, + AssetOut, + Definitions, + Output, + asset, + define_asset_job, + multi_asset, +) +from datahub.utilities.urns.dataset_urn import DatasetUrn + +from datahub_dagster_plugin.sensors.datahub_sensors import ( + DatahubDagsterSourceConfig, + make_datahub_sensor, +) + + +@multi_asset( + outs={ + "extract": AssetOut( + metadata={"datahub.outputs": [DatasetUrn("snowflake", "tableD").urn]} + ), + } +) +def extract(): + results = [1, 2, 3, 4] + metadata = { + "num_record": len(results), + } + return Output(value=results, metadata=metadata) + + +@asset( + ins={ + "extract": AssetIn( + "extract", + metadata={"datahub.inputs": [DatasetUrn("snowflake", "tableC").urn()]}, + ) + } +) +def transform(extract): + results = [] + for each in extract: + results.append(str(each)) + return results + + +assets_job = define_asset_job(name="assets_job") + +config = DatahubDagsterSourceConfig.parse_obj( + { + "rest_sink_config": { + "server": "http://localhost:8080", + }, + "dagster_url": "http://localhost:3000", + } +) + +datahub_sensor = make_datahub_sensor(config=config) + +defs = Definitions( + assets=[extract, transform], jobs=[assets_job], sensors=[datahub_sensor] +) diff --git a/metadata-ingestion-modules/dagster-plugin/examples/basic_setup.py b/metadata-ingestion-modules/dagster-plugin/examples/basic_setup.py new file mode 100644 index 0000000000000..300cf9df022c6 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/examples/basic_setup.py @@ -0,0 +1,20 @@ +from dagster import Definitions +from datahub.ingestion.graph.client import DatahubClientConfig + +from datahub_dagster_plugin.sensors.datahub_sensors import ( + DatahubDagsterSourceConfig, + make_datahub_sensor, +) + +config = DatahubDagsterSourceConfig( + datahub_client_config=DatahubClientConfig( + server="https://your_datahub_url/gms", token="your_datahub_token" + ), + dagster_url="https://my-dagster-cloud.dagster.cloud", +) + +datahub_sensor = make_datahub_sensor(config=config) + +defs = Definitions( + sensors=[datahub_sensor], +) diff --git a/metadata-ingestion-modules/dagster-plugin/examples/ops_job.py b/metadata-ingestion-modules/dagster-plugin/examples/ops_job.py new file mode 100644 index 0000000000000..d743e19a235d5 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/examples/ops_job.py @@ -0,0 +1,51 @@ +from dagster import Definitions, In, Out, PythonObjectDagsterType, job, op +from datahub.utilities.urns.dataset_urn import DatasetUrn + +from datahub_dagster_plugin.sensors.datahub_sensors import ( + DatahubDagsterSourceConfig, + make_datahub_sensor, +) + + +@op +def extract(): + results = [1, 2, 3, 4] + return results + + +@op( + ins={ + "data": In( + dagster_type=PythonObjectDagsterType(list), + metadata={"datahub.inputs": 
[DatasetUrn("snowflake", "tableA").urn]}, + ) + }, + out={ + "result": Out( + metadata={"datahub.outputs": [DatasetUrn("snowflake", "tableB").urn]} + ) + }, +) +def transform(data): + results = [] + for each in data: + results.append(str(each)) + return results + + +@job +def do_stuff(): + transform(extract()) + + +config = DatahubDagsterSourceConfig.parse_obj( + { + "rest_sink_config": { + "server": "http://localhost:8080", + }, + "dagster_url": "http://localhost:3000", + } +) + +datahub_sensor = make_datahub_sensor(config=config) +defs = Definitions(jobs=[do_stuff], sensors=[datahub_sensor]) diff --git a/metadata-ingestion-modules/dagster-plugin/pyproject.toml b/metadata-ingestion-modules/dagster-plugin/pyproject.toml new file mode 100644 index 0000000000000..fba81486b9f67 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/pyproject.toml @@ -0,0 +1,19 @@ +[build-system] +build-backend = "setuptools.build_meta" +requires = ["setuptools>=54.0.0", "wheel", "pip>=21.0.0"] + +[tool.black] +extend-exclude = ''' +# A regex preceded with ^/ will apply only to files and directories +# in the root of the project. +^/tmp +''' +include = '\.pyi?$' + +[tool.isort] +indent = ' ' +profile = 'black' +sections = 'FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER' + +[tool.pyright] +extraPaths = ['tests'] \ No newline at end of file diff --git a/metadata-ingestion-modules/dagster-plugin/scripts/release.sh b/metadata-ingestion-modules/dagster-plugin/scripts/release.sh new file mode 100755 index 0000000000000..30219956534d9 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/scripts/release.sh @@ -0,0 +1,26 @@ +#!/bin/bash +set -euxo pipefail + +if [[ ! ${RELEASE_SKIP_TEST:-} ]]; then + ../../gradlew build # also runs tests +elif [[ ! ${RELEASE_SKIP_INSTALL:-} ]]; then + ../../gradlew install +fi + +MODULE=datahub_dagster_plugin + +# Check packaging constraint. +python -c 'import setuptools; where="./src"; assert setuptools.find_packages(where) == setuptools.find_namespace_packages(where), "you seem to be missing or have extra __init__.py files"' +if [[ ${RELEASE_VERSION:-} ]]; then + # Replace version with RELEASE_VERSION env variable + sed -i.bak "s/__version__ = \"0.0.0.dev0\"/__version__ = \"$RELEASE_VERSION\"/" src/${MODULE}/__init__.py +else + vim src/${MODULE}/__init__.py +fi + +rm -rf build dist || true +python -m build +if [[ ! ${RELEASE_SKIP_UPLOAD:-} ]]; then + python -m twine upload 'dist/*' +fi +git restore src/${MODULE}/__init__.py diff --git a/metadata-ingestion-modules/dagster-plugin/setup.cfg b/metadata-ingestion-modules/dagster-plugin/setup.cfg new file mode 100644 index 0000000000000..20a903914332a --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/setup.cfg @@ -0,0 +1,73 @@ +[flake8] +max-complexity = 15 +ignore = + # Ignore: line length issues, since black's formatter will take care of them. + E501, + # Ignore: 1 blank line required before class docstring. + D203, + # See https://stackoverflow.com/a/57074416. + W503, + # See https://github.com/psf/black/issues/315. 
+ E203 +exclude = + .git, + venv, + .tox, + __pycache__ +per-file-ignores = + # imported but unused + __init__.py: F401 +ban-relative-imports = true + +[mypy] +plugins = + pydantic.mypy +exclude = ^(venv|build|dist)/ +ignore_missing_imports = yes +strict_optional = yes +check_untyped_defs = yes +disallow_incomplete_defs = yes +disallow_untyped_decorators = yes +warn_unused_configs = yes +# eventually we'd like to enable these +disallow_untyped_defs = no + +# try to be a bit more strict in certain areas of the codebase +[mypy-datahub.*] +ignore_missing_imports = no +[mypy-tests.*] +ignore_missing_imports = no + +[tool:pytest] +asyncio_mode = auto +addopts = --cov=src --cov-report term-missing --cov-config setup.cfg --strict-markers + +testpaths = + tests/unit + tests/integration + +[coverage:run] +# Because of some quirks in the way setup.cfg, coverage.py, pytest-cov, +# and tox interact, we should not uncomment the following line. +# See https://pytest-cov.readthedocs.io/en/latest/config.html and +# https://coverage.readthedocs.io/en/coverage-5.0/config.html. +# We also have some additional pytest/cov config options in tox.ini. +# source = src + +[coverage:paths] +# This is necessary for tox-based coverage to be counted properly. +source = + src + */site-packages + +[coverage:report] +# The fail_under value ensures that at least some coverage data is collected. +# We override its value in the tox config. +show_missing = true +exclude_lines = + pragma: no cover + @abstract + if TYPE_CHECKING: +omit = + # omit example jobs + src/datahub_dagster_plugin/example_jobs/* diff --git a/metadata-ingestion-modules/dagster-plugin/setup.py b/metadata-ingestion-modules/dagster-plugin/setup.py new file mode 100644 index 0000000000000..60b960e653eb2 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/setup.py @@ -0,0 +1,136 @@ +import os +import pathlib + +import setuptools + +package_metadata: dict = {} +with open("./src/datahub_dagster_plugin/__init__.py") as fp: + exec(fp.read(), package_metadata) + + +def get_long_description(): + root = os.path.dirname(__file__) + return pathlib.Path(os.path.join(root, "README.md")).read_text() + + +rest_common = {"requests", "requests_file"} + +_version: str = package_metadata["__version__"] +_self_pin = ( + f"=={_version}" if not (_version.endswith("dev0") or "docker" in _version) else "" +) + +base_requirements = { + # Actual dependencies. + "dagster >= 1.3.3", + "dagit >= 1.3.3", + *rest_common, + # Ignoring the dependency below because it causes issues with the vercel built wheel install + #f"acryl-datahub[datahub-rest]{_self_pin}", + "acryl-datahub[datahub-rest]", +} + +mypy_stubs = { + "types-dataclasses", + "sqlalchemy-stubs", + "types-pkg_resources", + "types-six", + "types-python-dateutil", + "types-requests", + "types-toml", + "types-PyYAML", + "types-freezegun", + "types-cachetools", + # versions 0.1.13 and 0.1.14 seem to have issues + "types-click==0.1.12", + "types-tabulate", + # avrogen package requires this + "types-pytz", +} + +base_dev_requirements = { + *base_requirements, + *mypy_stubs, + "black==22.12.0", + "coverage>=5.1", + "flake8>=6.0.0", + "flake8-tidy-imports>=4.3.0", + "flake8-bugbear==23.3.12", + "isort>=5.7.0", + "mypy>=1.4.0", + # pydantic 1.8.2 is incompatible with mypy 0.910. + # See https://github.com/samuelcolvin/pydantic/pull/3175#issuecomment-995382910. 
+ "pydantic>=1.10.0,!=1.10.3", + "pytest>=6.2.2", + "pytest-asyncio>=0.16.0", + "pytest-cov>=2.8.1", + "tox", + "deepdiff", + "requests-mock", + "freezegun", + "jsonpickle", + "build", + "twine", + "packaging", +} + +dev_requirements = { + *base_dev_requirements, +} + +integration_test_requirements = { + *dev_requirements, +} + +entry_points = { + "dagster.plugins": "acryl-datahub-dagster-plugin = datahub_dagster_plugin.datahub_dagster_plugin:DatahubDagsterPlugin" +} + + +setuptools.setup( + # Package metadata. + name=package_metadata["__package_name__"], + version=package_metadata["__version__"], + url="https://datahubproject.io/", + project_urls={ + "Documentation": "https://datahubproject.io/docs/", + "Source": "https://github.com/datahub-project/datahub", + "Changelog": "https://github.com/datahub-project/datahub/releases", + }, + license="Apache License 2.0", + description="Datahub Dagster plugin to capture executions and send to Datahub", + long_description=get_long_description(), + long_description_content_type="text/markdown", + classifiers=[ + "Development Status :: 5 - Production/Stable", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Intended Audience :: Developers", + "Intended Audience :: Information Technology", + "Intended Audience :: System Administrators", + "License :: OSI Approved", + "License :: OSI Approved :: Apache Software License", + "Operating System :: Unix", + "Operating System :: POSIX :: Linux", + "Environment :: Console", + "Environment :: MacOS X", + "Topic :: Software Development", + ], + # Package info. + zip_safe=False, + python_requires=">=3.8", + package_dir={"": "src"}, + packages=setuptools.find_namespace_packages(where="./src"), + entry_points=entry_points, + # Dependencies. + install_requires=list(base_requirements), + extras_require={ + "ignore": [], # This is a dummy extra to allow for trailing commas in the list. + "dev": list(dev_requirements), + "integration-tests": list(integration_test_requirements), + }, +) diff --git a/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/__init__.py b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/__init__.py new file mode 100644 index 0000000000000..1ecfc362ceb4e --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/__init__.py @@ -0,0 +1,21 @@ +# Published at https://pypi.org/project/acryl-datahub/. 
+__package_name__ = "acryl-datahub-dagster-plugin" +__version__ = "0.0.0.dev0" + + +def is_dev_mode() -> bool: + return __version__.endswith("dev0") + + +def nice_version_name() -> str: + if is_dev_mode(): + return "unavailable (installed in develop mode)" + return __version__ + + +def get_provider_info(): + return { + "package-name": f"{__package_name__}", + "name": f"{__package_name__}", + "description": "Datahub metadata collector plugin", + } diff --git a/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/__init__.py b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/dagster_generator.py b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/dagster_generator.py new file mode 100644 index 0000000000000..c00160dfb0319 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/client/dagster_generator.py @@ -0,0 +1,504 @@ +from dataclasses import dataclass +from logging import Logger +from typing import Any, Callable, Dict, List, NamedTuple, Optional, Sequence, Set +from urllib.parse import urlsplit + +import pydantic +from dagster import DagsterRunStatus, PathMetadataValue, RunStatusSensorContext +from dagster._core.execution.stats import RunStepKeyStatsSnapshot, StepEventStatus +from dagster._core.snap import JobSnapshot +from dagster._core.snap.node import OpDefSnap +from dagster._core.storage.dagster_run import DagsterRun, DagsterRunStatsSnapshot +from datahub.api.entities.datajob import DataFlow, DataJob +from datahub.api.entities.dataprocess.dataprocess_instance import ( + DataProcessInstance, + InstanceRunResult, +) +from datahub.api.entities.dataset.dataset import Dataset +from datahub.configuration.source_common import DatasetSourceConfigMixin +from datahub.emitter.mce_builder import ( + make_data_platform_urn, + make_dataplatform_instance_urn, +) +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DatahubClientConfig, DataHubGraph +from datahub.metadata.schema_classes import DataPlatformInstanceClass, SubTypesClass +from datahub.utilities.urns.data_flow_urn import DataFlowUrn +from datahub.utilities.urns.data_job_urn import DataJobUrn +from datahub.utilities.urns.dataset_urn import DatasetUrn + + +class Constant: + """ + Keys used in the Dagster plugin + """ + + ORCHESTRATOR = "dagster" + + # Default config constants + DEFAULT_DATAHUB_REST_URL = "http://localhost:8080" + + # Environment variable constants + DATAHUB_REST_URL = "DATAHUB_REST_URL" + DATAHUB_ENV = "DATAHUB_ENV" + DATAHUB_PLATFORM_INSTANCE = "DATAHUB_PLATFORM_INSTANCE" + DAGSTER_UI_URL = "DAGSTER_UI_URL" + + # Datahub inputs/outputs constants + DATAHUB_INPUTS = "datahub.inputs" + DATAHUB_OUTPUTS = "datahub.outputs" + + # Job run constants + JOB_SNAPSHOT_ID = "job_snapshot_id" + EXECUTION_PLAN_SNAPSHOT_ID = "execution_plan_snapshot_id" + ROOT_RUN_ID = "root_run_id" + PARENT_RUN_ID = "parent_run_id" + HAS_REPOSITORY_LOAD_DATA = "has_repository_load_data" + TAGS = "tags" + STEPS_SUCCEEDED = "steps_succeeded" + STEPS_FAILED = "steps_failed" + MATERIALIZATIONS = "materializations" + EXPECTATIONS = "expectations" + ENQUEUED_TIME = "enqueued_time" + LAUNCH_TIME = "launch_time" + START_TIME = "start_time" + END_TIME = "end_time" + + # Op run constants + STEP_KEY = "step_key" + ATTEMPTS = "attempts" + + +class 
DatasetLineage(NamedTuple): + inputs: Set[str] + outputs: Set[str] + + +class DatahubDagsterSourceConfig(DatasetSourceConfigMixin): + datahub_client_config: DatahubClientConfig = pydantic.Field( + default=DatahubClientConfig(), + description="Datahub client config", + ) + + dagster_url: Optional[str] = pydantic.Field( + default=None, + description="Dagster UI URL. Like: https://myDagsterCloudEnvironment.dagster.cloud/prod", + ) + + capture_asset_materialization: bool = pydantic.Field( + default=True, + description="Whether to capture asset keys as Dataset on AssetMaterialization event", + ) + + capture_input_output: bool = pydantic.Field( + default=False, + description="Whether to capture and try to parse input and output from HANDLED_OUTPUT, LOADED_INPUT event. (currently only filepathvalue metadata supported", + ) + + asset_lineage_extractor: Optional[ + Callable[ + [RunStatusSensorContext, "DagsterGenerator", DataHubGraph], + Dict[str, DatasetLineage], + ] + ] = pydantic.Field( + default=None, + description="Custom asset lineage extractor function. See details at [https://datahubproject.io/docs/lineage/dagster/#define-your-custom-logic-to-capture-asset-lineage-information]", + ) + + +def _str_urn_to_dataset_urn(urns: List[str]) -> List[DatasetUrn]: + return [DatasetUrn.create_from_string(urn) for urn in urns] + + +@dataclass +class DagsterEnvironment: + repository: Optional[str] + is_cloud: bool = True + is_branch_deployment: bool = False + branch: Optional[str] = "prod" + module: Optional[str] = None + + +def job_url_generator(dagster_url: str, dagster_environment: DagsterEnvironment) -> str: + if dagster_environment.is_cloud: + base_url = f"{dagster_url}/{dagster_environment.branch}" + else: + base_url = dagster_url + + if dagster_environment.module: + base_url = f"{base_url}/locations/{dagster_environment.module}" + + return base_url + + +class DagsterGenerator: + def __init__( + self, + logger: Logger, + config: DatahubDagsterSourceConfig, + dagster_environment: DagsterEnvironment, + ): + self.logger = logger + self.config = config + self.dagster_environment = dagster_environment + + def path_metadata_resolver(self, value: PathMetadataValue) -> Optional[DatasetUrn]: + """ + Resolve path metadata to dataset urn + """ + path = value.value + if not path: + return None + + if "://" in path: + url = urlsplit(path) + scheme = url.scheme + + # Need to adjust some these schemes + if scheme in ["s3a", "s3n"]: + scheme = "s3" + elif scheme in ["gs"]: + scheme = "gcs" + + return DatasetUrn(platform=scheme, name=url.path) + else: + return DatasetUrn(platform="file", name=path) + + def metadata_resolver(self, metadata: Any) -> Optional[DatasetUrn]: + """ + Resolve metadata to dataset urn + """ + if isinstance(metadata, PathMetadataValue): + return self.path_metadata_resolver(metadata) + else: + self.logger.info(f"Unknown Metadata: {metadata} of type {type(metadata)}") + return None + + def generate_dataflow( + self, + job_snapshot: JobSnapshot, + env: str, + platform_instance: Optional[str] = None, + ) -> DataFlow: + """ + Generates a Dataflow object from an Dagster Job Snapshot + :param job_snapshot: JobSnapshot - Job snapshot object + :param env: str + :param platform_instance: Optional[str] + :return: DataFlow - Data generated dataflow + """ + if self.dagster_environment.is_cloud: + id = f"{self.dagster_environment.branch}/{self.dagster_environment.module}/{job_snapshot.name}" + else: + id = f"{self.dagster_environment.module}/{job_snapshot.name}" + + dataflow = DataFlow( + 
orchestrator=Constant.ORCHESTRATOR, + id=id, + env=env, + name=job_snapshot.name, + platform_instance=platform_instance, + ) + dataflow.description = job_snapshot.description + dataflow.tags = set(job_snapshot.tags.keys()) + if self.config.dagster_url: + dataflow.url = f"{job_url_generator(dagster_url=self.config.dagster_url, dagster_environment=self.dagster_environment)}/jobs/{job_snapshot.name}" + flow_property_bag: Dict[str, str] = {} + for key in job_snapshot.metadata.keys(): + flow_property_bag[key] = str(job_snapshot.metadata[key]) + dataflow.properties = flow_property_bag + return dataflow + + def generate_datajob( + self, + job_snapshot: JobSnapshot, + step_deps: Dict[str, List], + op_def_snap: OpDefSnap, + env: str, + input_datasets: Dict[str, Set[DatasetUrn]], + output_datasets: Dict[str, Set[DatasetUrn]], + platform_instance: Optional[str] = None, + ) -> DataJob: + """ + Generates a DataJob object from a Dagster op snapshot + :param job_snapshot: JobSnapshot - Job snapshot object + :param op_def_snap: OpDefSnap - Op def snapshot object + :param env: str + :param platform_instance: Optional[str] + :param output_datasets: dict[str, Set[DatasetUrn]] - output datasets for each op + :return: DataJob - Generated datajob + """ + + if self.dagster_environment.is_cloud: + flow_id = f"{self.dagster_environment.branch}/{self.dagster_environment.module}/{job_snapshot.name}" + job_id = f"{self.dagster_environment.branch}/{self.dagster_environment.module}/{op_def_snap.name}" + else: + flow_id = f"{self.dagster_environment.module}/{job_snapshot.name}" + job_id = f"{self.dagster_environment.module}/{op_def_snap.name}" + + dataflow_urn = DataFlowUrn.create_from_ids( + orchestrator=Constant.ORCHESTRATOR, + flow_id=flow_id, + env=env, + platform_instance=platform_instance, + ) + datajob = DataJob( + id=job_id, + flow_urn=dataflow_urn, + name=op_def_snap.name, + ) + + if self.config.dagster_url: + datajob.url = f"{job_url_generator(dagster_url=self.config.dagster_url, dagster_environment=self.dagster_environment)}/jobs/{job_snapshot.name}/{op_def_snap.name}" + + datajob.description = op_def_snap.description + datajob.tags = set(op_def_snap.tags.keys()) + + # Add upstream dependencies for this op + for upstream_op_name in step_deps[op_def_snap.name]: + if self.dagster_environment.is_cloud: + upstream_job_id = f"{self.dagster_environment.branch}/{self.dagster_environment.module}/{upstream_op_name}" + else: + upstream_job_id = ( + f"{self.dagster_environment.module}/{upstream_op_name}" + ) + upstream_op_urn = DataJobUrn.create_from_ids( + data_flow_urn=str(dataflow_urn), + job_id=upstream_job_id, + ) + datajob.upstream_urns.extend([upstream_op_urn]) + job_property_bag: Dict[str, str] = {} + if input_datasets: + self.logger.info( + f"Input datasets for {op_def_snap.name} are {list(input_datasets.get(op_def_snap.name, []))}" + ) + datajob.inlets = list(input_datasets.get(op_def_snap.name, [])) + + if output_datasets: + self.logger.info( + f"Output datasets for {op_def_snap.name} are {list(output_datasets.get(op_def_snap.name, []))}" + ) + datajob.outlets = list(output_datasets.get(op_def_snap.name, [])) + + # For all op inputs/outputs: + # Add input/output details like their type, description, metadata, etc. in datajob properties. + # Also, add datahub inputs/outputs if present in input/output metadata. 
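+        # For example, an op output declared in user code as
+        #     Out(metadata={"datahub.outputs": [DatasetUrn("snowflake", "tableB").urn]})
+        # (see examples/ops_job.py in this patch) is picked up by the loops below and added to the job's outlets.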
+ for input_def_snap in op_def_snap.input_def_snaps: + job_property_bag[f"input.{input_def_snap.name}"] = str( + input_def_snap._asdict() + ) + if Constant.DATAHUB_INPUTS in input_def_snap.metadata: + datajob.inlets.extend( + _str_urn_to_dataset_urn( + input_def_snap.metadata[Constant.DATAHUB_INPUTS].value # type: ignore + ) + ) + + for output_def_snap in op_def_snap.output_def_snaps: + job_property_bag[f"output_{output_def_snap.name}"] = str( + output_def_snap._asdict() + ) + if Constant.DATAHUB_OUTPUTS in output_def_snap.metadata: + datajob.outlets.extend( + _str_urn_to_dataset_urn( + output_def_snap.metadata[Constant.DATAHUB_OUTPUTS].value # type: ignore + ) + ) + + datajob.properties = job_property_bag + + return datajob + + def emit_job_run( + self, + graph: DataHubGraph, + dataflow: DataFlow, + run: DagsterRun, + run_stats: DagsterRunStatsSnapshot, + ) -> None: + """ + Emit a latest job run + :param graph: DatahubRestEmitter + :param dataflow: DataFlow - DataFlow object + :param run: DagsterRun - Dagster Run object + :param run_stats: DagsterRunStatsSnapshot - latest job run stats + """ + dpi = DataProcessInstance.from_dataflow(dataflow=dataflow, id=run_stats.run_id) + if self.config.dagster_url: + if self.dagster_environment.is_cloud: + dpi.url = f"{self.config.dagster_url}/{self.dagster_environment.branch}/runs/{run.run_id}" + else: + dpi.url = f"{self.config.dagster_url}/runs/{run.run_id}" + + # Add below details in dpi properties + dpi_property_bag: Dict[str, str] = {} + allowed_job_run_keys = [ + Constant.JOB_SNAPSHOT_ID, + Constant.EXECUTION_PLAN_SNAPSHOT_ID, + Constant.ROOT_RUN_ID, + Constant.PARENT_RUN_ID, + Constant.HAS_REPOSITORY_LOAD_DATA, + Constant.TAGS, + Constant.STEPS_SUCCEEDED, + Constant.STEPS_FAILED, + Constant.MATERIALIZATIONS, + Constant.EXPECTATIONS, + Constant.ENQUEUED_TIME, + Constant.LAUNCH_TIME, + Constant.START_TIME, + Constant.END_TIME, + ] + for key in allowed_job_run_keys: + if hasattr(run, key) and getattr(run, key) is not None: + dpi_property_bag[key] = str(getattr(run, key)) + if hasattr(run_stats, key) and getattr(run_stats, key) is not None: + dpi_property_bag[key] = str(getattr(run_stats, key)) + dpi.properties.update(dpi_property_bag) + + status_result_map = { + DagsterRunStatus.SUCCESS: InstanceRunResult.SUCCESS, + DagsterRunStatus.FAILURE: InstanceRunResult.FAILURE, + DagsterRunStatus.CANCELED: InstanceRunResult.SKIPPED, + } + + if run.status not in status_result_map: + raise Exception( + f"Job run status should be either complete, failed or cancelled and it was " + f"{run.status }" + ) + + if run_stats.start_time is not None: + dpi.emit_process_start( + emitter=graph, + start_timestamp_millis=int(run_stats.start_time * 1000), + ) + + if run_stats.end_time is not None: + dpi.emit_process_end( + emitter=graph, + end_timestamp_millis=int(run_stats.end_time * 1000), + result=status_result_map[run.status], + result_type=Constant.ORCHESTRATOR, + ) + + def emit_op_run( + self, + graph: DataHubGraph, + datajob: DataJob, + run_step_stats: RunStepKeyStatsSnapshot, + ) -> None: + """ + Emit an op run + :param graph: DataHubGraph + :param datajob: DataJob - DataJob object + :param run_step_stats: RunStepKeyStatsSnapshot - step(op) run stats + """ + dpi = DataProcessInstance.from_datajob( + datajob=datajob, + id=f"{run_step_stats.run_id}.{datajob.id}", + clone_inlets=True, + clone_outlets=True, + ) + if self.config.dagster_url: + dpi.url = f"{self.config.dagster_url}/runs/{run_step_stats.run_id}" + if self.dagster_environment.is_cloud: + dpi.url = 
f"{self.config.dagster_url}/{self.dagster_environment.branch}/runs/{run_step_stats.run_id}" + else: + dpi.url = f"{self.config.dagster_url}/runs/{run_step_stats.run_id}" + + # Add below details in dpi properties + dpi_property_bag: Dict[str, str] = {} + allowed_op_run_keys = [ + Constant.STEP_KEY, + Constant.ATTEMPTS, + Constant.START_TIME, + Constant.END_TIME, + ] + for key in allowed_op_run_keys: + if ( + hasattr(run_step_stats, key) + and getattr(run_step_stats, key) is not None + ): + dpi_property_bag[key] = str(getattr(run_step_stats, key)) + dpi.properties.update(dpi_property_bag) + + status_result_map = { + StepEventStatus.SUCCESS: InstanceRunResult.SUCCESS, + StepEventStatus.FAILURE: InstanceRunResult.FAILURE, + StepEventStatus.SKIPPED: InstanceRunResult.SKIPPED, + } + + if run_step_stats.status not in status_result_map: + raise Exception( + f"Step run status should be either complete, failed or cancelled and it was " + f"{run_step_stats.status }" + ) + + if run_step_stats.start_time is not None: + dpi.emit_process_start( + emitter=graph, + start_timestamp_millis=int(run_step_stats.start_time * 1000), + ) + + if run_step_stats.end_time is not None: + dpi.emit_process_end( + emitter=graph, + end_timestamp_millis=int(run_step_stats.end_time * 1000), + result=status_result_map[run_step_stats.status], + result_type=Constant.ORCHESTRATOR, + ) + + def dataset_urn_from_asset(self, asset_key: Sequence[str]) -> DatasetUrn: + """ + Generate dataset urn from asset key + """ + return DatasetUrn( + platform="dagster", env=self.config.env, name="/".join(asset_key) + ) + + def emit_asset( + self, + graph: DataHubGraph, + asset_key: Sequence[str], + description: Optional[str], + properties: Optional[Dict[str, str]], + ) -> str: + """ + Emit asset to datahub + """ + dataset_urn = self.dataset_urn_from_asset(asset_key) + dataset = Dataset( + id=None, + urn=dataset_urn.urn(), + platform="dagster", + name=asset_key[-1], + schema=None, + downstreams=None, + subtype="Asset", + subtypes=None, + description=description, + env=self.config.env, + properties=properties, + ) + for mcp in dataset.generate_mcp(): + graph.emit_mcp(mcp) + + mcp = MetadataChangeProposalWrapper( + entityUrn=dataset_urn.urn(), + aspect=SubTypesClass(typeNames=["Asset"]), + ) + graph.emit_mcp(mcp) + + if self.config.platform_instance: + mcp = MetadataChangeProposalWrapper( + entityUrn=dataset_urn.urn(), + aspect=DataPlatformInstanceClass( + instance=make_dataplatform_instance_urn( + instance=self.config.platform_instance, + platform="dagster", + ), + platform=make_data_platform_urn("dagster"), + ), + ) + graph.emit_mcp(mcp) + return dataset_urn.urn() diff --git a/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/datahub_dagster_plugin.py b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/datahub_dagster_plugin.py new file mode 100644 index 0000000000000..3a66f97fe90bd --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/datahub_dagster_plugin.py @@ -0,0 +1,2 @@ +class DatahubDagsterPlugin: + name = "datahub_dagster_plugin" diff --git a/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/__init__.py b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/__init__.py new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/datahub_sensors.py 
b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/datahub_sensors.py new file mode 100644 index 0000000000000..181ecc7b5c5cd --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/src/datahub_dagster_plugin/sensors/datahub_sensors.py @@ -0,0 +1,439 @@ +import os +import traceback +from typing import Dict, List, Optional, Sequence, Set, Tuple + +from dagster import ( + DagsterRunStatus, + EventLogEntry, + RunStatusSensorContext, + SensorDefinition, + SkipReason, + run_status_sensor, + sensor, +) +from dagster._core.definitions.asset_selection import CoercibleToAssetSelection +from dagster._core.definitions.sensor_definition import ( + DefaultSensorStatus, + RawSensorEvaluationFunctionReturn, +) +from dagster._core.definitions.target import ExecutableDefinition +from dagster._core.events import DagsterEventType, HandledOutputData, LoadedInputData +from dagster._core.execution.stats import RunStepKeyStatsSnapshot +from datahub.emitter.mcp import MetadataChangeProposalWrapper +from datahub.ingestion.graph.client import DataHubGraph +from datahub.metadata.schema_classes import SubTypesClass + +from datahub_dagster_plugin.client.dagster_generator import ( + DagsterEnvironment, + DagsterGenerator, + DatahubDagsterSourceConfig, +) + + +def make_datahub_sensor( + config: DatahubDagsterSourceConfig, + name: Optional[str] = None, + minimum_interval_seconds: Optional[int] = None, + description: Optional[str] = None, + job: Optional[ExecutableDefinition] = None, + jobs: Optional[Sequence[ExecutableDefinition]] = None, + default_status: DefaultSensorStatus = DefaultSensorStatus.STOPPED, + asset_selection: Optional[CoercibleToAssetSelection] = None, + required_resource_keys: Optional[Set[str]] = None, +) -> SensorDefinition: + """Create a sensor that emits lineage to DataHub on job status changes. + + Args: + config (DatahubDagsterSourceConfig): DataHub Sensor config + name: (Optional[str]): The name of the sensor. Defaults to "datahub_sensor". + minimum_interval_seconds: (Optional[int]): The minimum number of seconds that will elapse + between sensor evaluations. + default_status (DefaultSensorStatus): Whether the sensor starts as running or not. The default + status can be overridden from Dagit or via the GraphQL API. + + Examples: + .. 
code-block:: python + + datahub_sensor = make_datahub_sensor( + config + ) + + @repository + def my_repo(): + return [my_job, datahub_sensor] + + + """ + + @sensor( + name=name, + minimum_interval_seconds=minimum_interval_seconds, + description=description, + job=job, + jobs=jobs, + default_status=default_status, + asset_selection=asset_selection, + required_resource_keys=required_resource_keys, + ) + def datahub_sensor(context): + """ + Sensor which instigates all run status sensors and triggers them based upon run status + """ + for each in DatahubSensors(config).sensors: + each.evaluate_tick(context) + return SkipReason("Trigger run status sensors if any new runs present...") + + return datahub_sensor + + +class DatahubSensors: + def __init__(self, config: Optional[DatahubDagsterSourceConfig] = None): + """ + Set dagster source configurations and initialize datahub emitter and dagster run status sensors + """ + if config: + self.config = config + else: + self.config = DatahubDagsterSourceConfig() + self.graph = DataHubGraph( + self.config.datahub_client_config, + ) + + self.graph.test_connection() + self.sensors: List[SensorDefinition] = [] + self.sensors.append( + run_status_sensor( + name="datahub_success_sensor", run_status=DagsterRunStatus.SUCCESS + )(self._emit_metadata) + ) + + self.sensors.append( + run_status_sensor( + name="datahub_failure_sensor", run_status=DagsterRunStatus.FAILURE + )(self._emit_metadata) + ) + + self.sensors.append( + run_status_sensor( + name="datahub_canceled_sensor", run_status=DagsterRunStatus.CANCELED + )(self._emit_metadata) + ) + + def get_dagster_environment( + self, context: RunStatusSensorContext + ) -> Optional[DagsterEnvironment]: + if ( + context.dagster_run.job_code_origin + and context.dagster_run.job_code_origin.repository_origin + and context.dagster_run.job_code_origin.repository_origin.code_pointer + ): + + code_pointer = ( + context.dagster_run.job_code_origin.repository_origin.code_pointer + ) + context.log.debug(f"code_pointer: {code_pointer}") + + if hasattr(code_pointer, "attribute"): + repository = code_pointer.attribute + else: + repository = None + + if hasattr(code_pointer, "module"): + module = code_pointer.module + else: + context.log.error("Unable to get Module") + return None + + dagster_environment = DagsterEnvironment( + is_cloud=os.getenv("DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT", None) + is not None, + is_branch_deployment=( + True + if os.getenv("DAGSTER_CLOUD_IS_BRANCH_DEPLOYMENT", False) == 1 + else False + ), + branch=os.getenv("DAGSTER_CLOUD_DEPLOYMENT_NAME", "prod"), + module=module, + repository=repository, + ) + return dagster_environment + else: + context.log.error("Unable to get Dagster Environment...") + return None + + def process_asset_logs( + self, + dagster_generator: DagsterGenerator, + log: EventLogEntry, + dataset_inputs: Dict[str, set], + dataset_outputs: Dict[str, set], + ) -> None: + + if not log.dagster_event or not log.step_key: + return + + if log.dagster_event.event_type == DagsterEventType.ASSET_MATERIALIZATION: + if log.step_key not in dataset_outputs: + dataset_outputs[log.step_key] = set() + + materialization = log.asset_materialization + if not materialization: + return + + properties = { + key: str(value) for (key, value) in materialization.metadata.items() + } + asset_key = materialization.asset_key.path + dataset_urn = dagster_generator.emit_asset( + self.graph, asset_key, materialization.description, properties + ) + dataset_outputs[log.step_key].add(dataset_urn) + + elif 
log.dagster_event.event_type == DagsterEventType.ASSET_OBSERVATION: + if log.step_key not in dataset_inputs: + dataset_inputs[log.step_key] = set() + asset_observation = log.asset_observation + if not asset_observation: + return + + properties = { + key: str(value) + for (key, value) in asset_observation.metadata.items() # type: ignore + } + asset_key = asset_observation.asset_key.path # type: ignore + dataset_urn = dagster_generator.emit_asset( + self.graph, + asset_key, + asset_observation.description, + properties, # type: ignore + ) + dataset_inputs[log.step_key].add(dataset_urn) + + def process_handle_input_output( + self, + context: RunStatusSensorContext, + log: EventLogEntry, + dagster_generator: DagsterGenerator, + dataset_inputs: Dict[str, set], + dataset_outputs: Dict[str, set], + ) -> None: + if not log.dagster_event or not log.step_key: + return + + if ( + self.config.capture_input_output + and log.dagster_event.event_type == DagsterEventType.HANDLED_OUTPUT + ): + if log.step_key not in dataset_outputs: + dataset_outputs[log.step_key] = set() + + event_specific_data = log.dagster_event.event_specific_data + if isinstance(event_specific_data, HandledOutputData): + context.log.debug( + f"Output Path: {event_specific_data.metadata.get('path')}" + ) + metadata = event_specific_data.metadata.get("path") + context.log.debug(f"Metadata: {metadata}") + if not metadata: + return + urn = dagster_generator.metadata_resolver(metadata) + if urn: + context.log.debug(f"Output Urn: {urn}") + dataset_outputs[log.step_key].add(urn) + elif ( + self.config.capture_input_output + and log.dagster_event.event_type == DagsterEventType.LOADED_INPUT + ): + if log.step_key not in dataset_inputs: + dataset_inputs[log.step_key] = set() + event_specific_data = log.dagster_event.event_specific_data + if isinstance(event_specific_data, LoadedInputData): + context.log.debug( + f"Input Path: {event_specific_data.metadata.get('path')}" + ) + metadata = event_specific_data.metadata.get("path") + context.log.debug(f"Metadata: {metadata}") + if not metadata: + return + urn = dagster_generator.metadata_resolver(metadata) + if urn: + context.log.debug(f"Input Urn: {urn}") + dataset_inputs[log.step_key].add(urn) + + def process_dagster_logs( + self, context: RunStatusSensorContext, dagster_generator: DagsterGenerator + ) -> Tuple[Dict[str, set], Dict[str, set]]: + dataset_outputs: Dict[str, set] = {} + dataset_inputs: Dict[str, set] = {} + + logs = context.instance.all_logs( + context.dagster_run.run_id, + { + DagsterEventType.ASSET_MATERIALIZATION, + DagsterEventType.ASSET_OBSERVATION, + DagsterEventType.HANDLED_OUTPUT, + DagsterEventType.LOADED_INPUT, + }, + ) + + for log in logs: + if not log.dagster_event or not log.step_key: + continue + context.log.debug(f"Log: {log.step_key} - {log.dagster_event}") + context.log.debug(f"Event Type: {log.dagster_event.event_type}") + if self.config.capture_input_output: + self.process_handle_input_output( + context=context, + log=log, + dagster_generator=dagster_generator, + dataset_inputs=dataset_inputs, + dataset_outputs=dataset_outputs, + ) + + if self.config.capture_asset_materialization: + self.process_asset_logs( + dagster_generator=dagster_generator, + log=log, + dataset_inputs=dataset_inputs, + dataset_outputs=dataset_outputs, + ) + + return dataset_inputs, dataset_outputs + + @staticmethod + def merge_dicts(dict1: Dict[str, Set], dict2: Dict[str, Set]) -> Dict[str, Set]: + """ + Merge two dictionaries + """ + for key, value in dict2.items(): + if key in dict1: + 
dict1[key] = dict1[key].union(value) + else: + dict1[key] = value + return dict1 + + def _emit_metadata( + self, context: RunStatusSensorContext + ) -> RawSensorEvaluationFunctionReturn: + """ + Emit run metadata to DataHub. + """ + try: + context.log.info("Emitting metadata...") + + assert context.dagster_run.job_snapshot_id + assert context.dagster_run.execution_plan_snapshot_id + + dagster_environment = self.get_dagster_environment(context) + context.log.debug(f"dagster environment: {dagster_environment}") + if not dagster_environment: + return SkipReason( + "Unable to get Dagster Environment from DataHub Sensor" + ) + + context.log.debug(f"Dagster Environment: {dagster_environment}") + + dagster_generator = DagsterGenerator( + logger=context.log, + config=self.config, + dagster_environment=dagster_environment, + ) + + job_snapshot = context.instance.get_job_snapshot( + snapshot_id=context.dagster_run.job_snapshot_id + ) + + dataset_inputs: Dict[str, Set] = {} + dataset_outputs: Dict[str, Set] = {} + + if self.config.asset_lineage_extractor: + asset_lineages = self.config.asset_lineage_extractor( + context, dagster_generator, self.graph + ) + for key, value in asset_lineages.items(): + dataset_inputs[key] = dataset_inputs.get(key, set()).union( + value.inputs + ) + dataset_outputs[key] = dataset_outputs.get(key, set()).union( + value.outputs + ) + + ( + dataset_inputs_from_logs, + dataset_outputs_from_logs, + ) = self.process_dagster_logs(context, dagster_generator) + + dataset_inputs = DatahubSensors.merge_dicts( + dataset_inputs, dataset_inputs_from_logs + ) + dataset_outputs = DatahubSensors.merge_dicts( + dataset_outputs, dataset_outputs_from_logs + ) + + context.log.debug(f"Outputs: {dataset_outputs}") + # Emit the dagster job entity, which gets mapped to the DataHub dataflow entity + dataflow = dagster_generator.generate_dataflow( + job_snapshot=job_snapshot, + env=self.config.env, + platform_instance=self.config.platform_instance, + ) + dataflow.emit(self.graph) + + # Emit the dagster job run, which gets mapped to the DataHub data process instance entity + dagster_generator.emit_job_run( + graph=self.graph, + dataflow=dataflow, + run=context.dagster_run, + run_stats=context.instance.get_run_stats(context.dagster_run.run_id), + ) + + # The execution plan snapshot contains all step (op) dependencies. + execution_plan_snapshot = context.instance.get_execution_plan_snapshot( + snapshot_id=context.dagster_run.execution_plan_snapshot_id + ) + + # Map each step key to its run step stats + run_step_stats: Dict[str, RunStepKeyStatsSnapshot] = { + run_step_stat.step_key: run_step_stat + for run_step_stat in context.instance.get_run_step_stats( + context.dagster_run.run_id + ) + } + + # For all dagster ops present in the job: + # Emit the op entity, which gets mapped to the DataHub datajob entity. + # Emit the op run, which gets mapped to the DataHub data process instance entity. 
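+            # Note: run_step_stats above is keyed by step key; the lookup in the loop below
+            # (run_step_stats[op_def_snap.name]) assumes each op's step key equals its op name.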
+ for op_def_snap in job_snapshot.node_defs_snapshot.op_def_snaps: + datajob = dagster_generator.generate_datajob( + job_snapshot=job_snapshot, + step_deps=execution_plan_snapshot.step_deps, + op_def_snap=op_def_snap, + env=self.config.env, + platform_instance=self.config.platform_instance, + output_datasets=dataset_outputs, + input_datasets=dataset_inputs, + ) + context.log.info(f"Generated Datajob: {datajob}") + datajob.emit(self.graph) + + self.graph.emit_mcp( + mcp=MetadataChangeProposalWrapper( + entityUrn=str(datajob.urn), + aspect=SubTypesClass( + typeNames=["Op"], + ), + ) + ) + + dagster_generator.emit_op_run( + graph=self.graph, + datajob=datajob, + run_step_stats=run_step_stats[op_def_snap.name], + ) + + return SkipReason("Pipeline metadata is emitted to DataHub") + except Exception as e: + context.log.error( + f"Error in emitting metadata to DataHub: {e}. Traceback: {traceback.format_exc()}" + ) + return SkipReason("Error in emitting metadata to DataHub") diff --git a/metadata-ingestion-modules/dagster-plugin/tests/integration/integration_test_dummy.py b/metadata-ingestion-modules/dagster-plugin/tests/integration/integration_test_dummy.py new file mode 100644 index 0000000000000..10cf3ad0a608a --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/tests/integration/integration_test_dummy.py @@ -0,0 +1,2 @@ +def test_dummy(): + pass diff --git a/metadata-ingestion-modules/dagster-plugin/tests/unit/test_dagster.py b/metadata-ingestion-modules/dagster-plugin/tests/unit/test_dagster.py new file mode 100644 index 0000000000000..ac46cfd86fbb9 --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/tests/unit/test_dagster.py @@ -0,0 +1,303 @@ +from unittest.mock import Mock, patch + +import pytest +from dagster import ( + DagsterInstance, + In, + Out, + SkipReason, + build_run_status_sensor_context, + build_sensor_context, + job, + op, +) +from datahub.api.entities.dataprocess.dataprocess_instance import ( + DataProcessInstanceKey, + InstanceRunResult, +) +from datahub.configuration.source_common import DEFAULT_ENV +from datahub.ingestion.graph.client import DatahubClientConfig + +from datahub_dagster_plugin.client.dagster_generator import DatahubDagsterSourceConfig +from datahub_dagster_plugin.sensors.datahub_sensors import ( + DatahubSensors, + make_datahub_sensor, +) + + +@patch("datahub.ingestion.graph.client.DataHubGraph", autospec=True) +@pytest.mark.skip(reason="disabling this test until it uses proper golden files") +def test_datahub_sensor(mock_emit): + instance = DagsterInstance.ephemeral() + context = build_sensor_context(instance=instance) + mock_emit.return_value = Mock() + + config = DatahubDagsterSourceConfig( + datahub_client_config=DatahubClientConfig( + server="http://localhost:8081", + ), + dagster_url="http://localhost:3000", + ) + + datahub_sensor = make_datahub_sensor(config) + skip_reason = datahub_sensor(context) + assert isinstance(skip_reason, SkipReason) + + +@patch("datahub_dagster_plugin.sensors.datahub_sensors.DatahubClient", autospec=True) +@pytest.mark.skip(reason="disabling this test until it uses proper golden files") +def test_emit_metadata(mock_emit): + mock_emitter = Mock() + mock_emit.return_value = mock_emitter + + @op( + out={ + "result": Out( + metadata={ + "datahub.outputs": [ + "urn:li:dataset:(urn:li:dataPlatform:snowflake,tableB,PROD)" + ] + } + ) + } + ) + def extract(): + results = [1, 2, 3, 4] + return results + + @op( + ins={ + "data": In( + metadata={ + "datahub.inputs": [ 
"urn:li:dataset:(urn:li:dataPlatform:snowflake,tableA,PROD)" + ] + } + ) + } + ) + def transform(data): + results = [] + for each in data: + results.append(str(each)) + return results + + @job + def etl(): + transform(extract()) + + instance = DagsterInstance.ephemeral() + result = etl.execute_in_process(instance=instance) + + # retrieve the DagsterRun + dagster_run = result.dagster_run + + # retrieve a success event from the completed execution + dagster_event = result.get_job_success_event() + + # create the context + run_status_sensor_context = build_run_status_sensor_context( + sensor_name="my_email_sensor", + dagster_instance=instance, + dagster_run=dagster_run, + dagster_event=dagster_event, + ) + + DatahubSensors()._emit_metadata(run_status_sensor_context) + + expected_dataflow_urn = ( + f"urn:li:dataFlow:(dagster,{dagster_run.job_name},{DEFAULT_ENV})" + ) + assert mock_emitter.method_calls[1][1][0].aspectName == "dataFlowInfo" + assert mock_emitter.method_calls[1][1][0].entityUrn == expected_dataflow_urn + assert mock_emitter.method_calls[2][1][0].aspectName == "ownership" + assert mock_emitter.method_calls[2][1][0].entityUrn == expected_dataflow_urn + assert mock_emitter.method_calls[3][1][0].aspectName == "globalTags" + assert mock_emitter.method_calls[3][1][0].entityUrn == expected_dataflow_urn + + dpi_id = DataProcessInstanceKey( + cluster=DEFAULT_ENV, + orchestrator="dagster", + id=dagster_run.run_id, + ).guid() + assert ( + mock_emitter.method_calls[7][1][0].aspectName == "dataProcessInstanceProperties" + ) + assert ( + mock_emitter.method_calls[7][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[8][1][0].aspectName + == "dataProcessInstanceRelationships" + ) + assert ( + mock_emitter.method_calls[8][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[9][1][0].aspectName == "dataProcessInstanceRunEvent" + ) + assert ( + mock_emitter.method_calls[9][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[10][1][0].aspectName == "dataProcessInstanceRunEvent" + ) + assert ( + mock_emitter.method_calls[10][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[10][1][0].aspect.result.type + == InstanceRunResult.SUCCESS + ) + assert mock_emitter.method_calls[11][1][0].aspectName == "dataJobInfo" + assert ( + mock_emitter.method_calls[11][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},extract)" + ) + assert mock_emitter.method_calls[12][1][0].aspectName == "dataJobInputOutput" + assert ( + mock_emitter.method_calls[12][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},extract)" + ) + assert mock_emitter.method_calls[13][1][0].aspectName == "status" + assert ( + mock_emitter.method_calls[13][1][0].entityUrn + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,tableB,PROD)" + ) + assert mock_emitter.method_calls[14][1][0].aspectName == "ownership" + assert ( + mock_emitter.method_calls[14][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},extract)" + ) + assert mock_emitter.method_calls[15][1][0].aspectName == "globalTags" + assert ( + mock_emitter.method_calls[15][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},extract)" + ) + dpi_id = DataProcessInstanceKey( + cluster=DEFAULT_ENV, + orchestrator="dagster", + id=f"{dagster_run.run_id}.extract", + ).guid() + assert ( + mock_emitter.method_calls[21][1][0].aspectName + == 
"dataProcessInstanceProperties" + ) + assert ( + mock_emitter.method_calls[21][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[22][1][0].aspectName + == "dataProcessInstanceRelationships" + ) + assert ( + mock_emitter.method_calls[22][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert mock_emitter.method_calls[23][1][0].aspectName == "dataProcessInstanceOutput" + assert ( + mock_emitter.method_calls[23][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert mock_emitter.method_calls[24][1][0].aspectName == "status" + assert ( + mock_emitter.method_calls[24][1][0].entityUrn + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,tableB,PROD)" + ) + assert ( + mock_emitter.method_calls[25][1][0].aspectName == "dataProcessInstanceRunEvent" + ) + assert ( + mock_emitter.method_calls[25][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[26][1][0].aspectName == "dataProcessInstanceRunEvent" + ) + assert ( + mock_emitter.method_calls[26][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[26][1][0].aspect.result.type + == InstanceRunResult.SUCCESS + ) + assert mock_emitter.method_calls[27][1][0].aspectName == "dataJobInfo" + assert ( + mock_emitter.method_calls[27][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},transform)" + ) + assert mock_emitter.method_calls[28][1][0].aspectName == "dataJobInputOutput" + assert ( + mock_emitter.method_calls[28][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},transform)" + ) + assert mock_emitter.method_calls[29][1][0].aspectName == "status" + assert ( + mock_emitter.method_calls[29][1][0].entityUrn + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,tableA,PROD)" + ) + assert mock_emitter.method_calls[30][1][0].aspectName == "ownership" + assert ( + mock_emitter.method_calls[30][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},transform)" + ) + assert mock_emitter.method_calls[31][1][0].aspectName == "globalTags" + assert ( + mock_emitter.method_calls[31][1][0].entityUrn + == f"urn:li:dataJob:({expected_dataflow_urn},transform)" + ) + dpi_id = DataProcessInstanceKey( + cluster=DEFAULT_ENV, + orchestrator="dagster", + id=f"{dagster_run.run_id}.transform", + ).guid() + assert ( + mock_emitter.method_calls[37][1][0].aspectName + == "dataProcessInstanceProperties" + ) + assert ( + mock_emitter.method_calls[37][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[38][1][0].aspectName + == "dataProcessInstanceRelationships" + ) + assert ( + mock_emitter.method_calls[38][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert mock_emitter.method_calls[39][1][0].aspectName == "dataProcessInstanceInput" + assert ( + mock_emitter.method_calls[39][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert mock_emitter.method_calls[40][1][0].aspectName == "status" + assert ( + mock_emitter.method_calls[40][1][0].entityUrn + == "urn:li:dataset:(urn:li:dataPlatform:snowflake,tableA,PROD)" + ) + assert ( + mock_emitter.method_calls[41][1][0].aspectName == "dataProcessInstanceRunEvent" + ) + assert ( + mock_emitter.method_calls[41][1][0].entityUrn + == f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[42][1][0].aspectName == "dataProcessInstanceRunEvent" + ) + assert ( + mock_emitter.method_calls[42][1][0].entityUrn + == 
f"urn:li:dataProcessInstance:{dpi_id}" + ) + assert ( + mock_emitter.method_calls[42][1][0].aspect.result.type + == InstanceRunResult.SUCCESS + ) diff --git a/metadata-ingestion-modules/dagster-plugin/tests/unit/test_dummy.py b/metadata-ingestion-modules/dagster-plugin/tests/unit/test_dummy.py new file mode 100644 index 0000000000000..10cf3ad0a608a --- /dev/null +++ b/metadata-ingestion-modules/dagster-plugin/tests/unit/test_dummy.py @@ -0,0 +1,2 @@ +def test_dummy(): + pass diff --git a/metadata-ingestion/developing.md b/metadata-ingestion/developing.md index 47e325171ddcc..9b4b4c56339b9 100644 --- a/metadata-ingestion/developing.md +++ b/metadata-ingestion/developing.md @@ -35,7 +35,16 @@ cd metadata-ingestion-modules/airflow-plugin source venv/bin/activate datahub version # should print "DataHub CLI version: unavailable (installed in develop mode)" ``` +### (Optional) Set up your Python environment for developing on Dagster Plugin +From the repository root: + +```shell +cd metadata-ingestion-modules/dagster-plugin +../../gradlew :metadata-ingestion-modules:dagster-plugin:installDev +source venv/bin/activate +datahub version # should print "DataHub CLI version: unavailable (installed in develop mode)" +``` ### Common setup issues Common issues (click to expand): diff --git a/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py b/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py index a4b3779b73803..7a807bde2ed0a 100644 --- a/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py +++ b/metadata-ingestion/src/datahub/ingestion/source/data_lake_common/path_spec.py @@ -45,6 +45,7 @@ class Config: ) default_extension: Optional[str] = Field( + default=None, description="For files without extension it will assume the specified file type. 
If it is not set the files without extensions will be skipped.", ) diff --git a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json index f54c62865bcde..d9a5b8b4a7eb8 100644 --- a/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json +++ b/metadata-ingestion/tests/integration/s3/golden-files/local/golden_mces_single_file.json @@ -18,7 +18,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -111,7 +112,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -132,7 +134,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -152,7 +155,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -167,7 +171,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -182,7 +187,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -199,7 +205,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -214,7 +221,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -234,7 +242,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -249,7 +258,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -264,7 +274,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -281,7 +292,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -296,7 +308,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -316,7 +329,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -336,7 +350,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -351,7 +366,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -366,7 +382,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -383,7 +400,8 @@ }, "systemMetadata": { 
"lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -398,7 +416,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -422,7 +441,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -442,7 +462,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -457,7 +478,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -472,7 +494,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -489,7 +512,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -504,7 +528,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -532,7 +557,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -552,7 +578,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -567,7 +594,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -582,7 +610,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -599,7 +628,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -614,7 +644,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -646,7 +677,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -666,7 +698,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -681,7 +714,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -696,7 +730,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -713,7 +748,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -728,7 +764,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -764,7 +801,8 @@ }, 
"systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -784,7 +822,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -799,7 +838,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -814,7 +854,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -831,7 +872,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -846,7 +888,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -886,7 +929,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -906,7 +950,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -921,7 +966,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -936,7 +982,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -953,7 +1000,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -968,7 +1016,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -1012,7 +1061,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -1027,7 +1077,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -1377,7 +1428,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -1392,7 +1444,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } }, { @@ -1440,7 +1493,8 @@ }, "systemMetadata": { "lastObserved": 1615443388097, - "runId": "single_file.json" + "runId": "single_file.json", + "lastRunId": "no-run-id-provided" } } ] \ No newline at end of file diff --git a/settings.gradle b/settings.gradle index ade0c818b130d..f553bf97ec14b 100644 --- a/settings.gradle +++ b/settings.gradle @@ -59,6 +59,7 @@ include 'metadata-integration:java:openlineage-converter' include 'metadata-integration:java:spark-lineage-beta' include 'ingestion-scheduler' include 'metadata-ingestion-modules:airflow-plugin' +include 'metadata-ingestion-modules:dagster-plugin' include 'smoke-test' include 'metadata-auth:auth-api' include 'metadata-service:schema-registry-api' From 
c8a38188f88a5d082a2fdc15b6232e4d8d93bade Mon Sep 17 00:00:00 2001
From: Chris Collins
Date: Mon, 25 Mar 2024 08:37:51 -0700
Subject: [PATCH 18/18] fix(forms) Fix a couple of small inconsistencies with forms (#9928)

---
 .../profile/stats/stats/DatasetStatsSummarySubHeader.tsx  | 3 ++-
 .../src/app/entity/dataset/shared/DatasetStatsSummary.tsx | 4 +++-
 .../app/entity/shared/components/styled/StatsSummary.tsx  | 8 +++++---
 .../containers/profile/sidebar/EntityInfo/EntityInfo.tsx  | 4 ++--
 datahub-web-react/src/app/onboarding/OnboardingStep.tsx   | 1 +
 datahub-web-react/src/app/onboarding/utils.tsx            | 5 +++++
 .../src/providers/EducationStepsProvider.tsx              | 5 ++---
 7 files changed, 20 insertions(+), 10 deletions(-)

diff --git a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx
index c1e2c1aa298b6..bbac8ecd2c61e 100644
--- a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx
+++ b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx
@@ -5,7 +5,7 @@ import { GetDatasetQuery } from '../../../../../../graphql/dataset.generated';
 import { DatasetStatsSummary } from '../../../shared/DatasetStatsSummary';
 import { getLastUpdatedMs } from '../../../shared/utils';
 
-export const DatasetStatsSummarySubHeader = () => {
+export const DatasetStatsSummarySubHeader = ({ properties }: { properties?: any }) => {
     const result = useBaseEntity<GetDatasetQuery>();
     const dataset = result?.dataset;
 
@@ -31,6 +31,7 @@ export const DatasetStatsSummarySubHeader = () => {
             queryCountLast30Days={queryCountLast30Days}
             uniqueUserCountLast30Days={uniqueUserCountLast30Days}
             lastUpdatedMs={lastUpdatedMs}
+            shouldWrap={properties?.shouldWrap}
         />
     );
 };
diff --git a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx
index 1a5c01df5bde2..8e9102145cfa5 100644
--- a/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx
+++ b/datahub-web-react/src/app/entity/dataset/shared/DatasetStatsSummary.tsx
@@ -30,6 +30,7 @@ type Props = {
     lastUpdatedMs?: number | null;
     color?: string;
     mode?: 'normal' | 'tooltip-content';
+    shouldWrap?: boolean;
 };
 
 export const DatasetStatsSummary = ({
@@ -42,6 +43,7 @@ export const DatasetStatsSummary = ({
     lastUpdatedMs,
     color,
     mode = 'normal',
+    shouldWrap,
 }: Props) => {
     const isTooltipMode = mode === 'tooltip-content';
     const displayedColor = isTooltipMode ? '' : color ?? ANTD_GRAY[7];
@@ -105,5 +107,5 @@ export const DatasetStatsSummary = ({
         ),
     ].filter((stat) => stat);
 
-    return <>{statsViews.length > 0 && <StatsSummary stats={statsViews} />}</>;
+    return <>{statsViews.length > 0 && <StatsSummary stats={statsViews} shouldWrap={shouldWrap} />}</>;
 };
diff --git a/datahub-web-react/src/app/entity/shared/components/styled/StatsSummary.tsx b/datahub-web-react/src/app/entity/shared/components/styled/StatsSummary.tsx
index a0fe5ef031bb7..9694fb631f9b0 100644
--- a/datahub-web-react/src/app/entity/shared/components/styled/StatsSummary.tsx
+++ b/datahub-web-react/src/app/entity/shared/components/styled/StatsSummary.tsx
@@ -4,12 +4,14 @@ import { ANTD_GRAY } from '../../constants';
 
 type Props = {
     stats: Array<ReactNode>;
+    shouldWrap?: boolean;
 };
 
-const StatsContainer = styled.div`
+const StatsContainer = styled.div<{ shouldWrap?: boolean }>`
     margin-top: 8px;
     display: flex;
     align-items: center;
+    ${(props) => props.shouldWrap && `flex-wrap: wrap;`}
 `;
 
 const StatDivider = styled.div`
@@ -19,11 +21,11 @@ const StatDivider = styled.div`
     height: 21px;
 `;
 
-export const StatsSummary = ({ stats }: Props) => {
+export const StatsSummary = ({ stats, shouldWrap }: Props) => {
     return (
         <>
             {stats && stats.length > 0 && (
-                <StatsContainer>
+                <StatsContainer shouldWrap={shouldWrap}>
                     {stats.map((statView, index) => (
                         <>
                             {statView}
diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx
index 02dc70dc157bd..1c09b5291fd7e 100644
--- a/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx
+++ b/datahub-web-react/src/app/entity/shared/containers/profile/sidebar/EntityInfo/EntityInfo.tsx
@@ -50,13 +50,13 @@ export default function EntityInfo({ formUrn }: Props) {
                 {entityName}
             View Profile
-
+
diff --git a/datahub-web-react/src/app/onboarding/OnboardingStep.tsx b/datahub-web-react/src/app/onboarding/OnboardingStep.tsx
index 08bad906724f0..cb9d2ad900768 100644
--- a/datahub-web-react/src/app/onboarding/OnboardingStep.tsx
+++ b/datahub-web-react/src/app/onboarding/OnboardingStep.tsx
@@ -10,4 +10,5 @@ export type OnboardingStep = {
     content?: ReactNode;
     selector?: string;
     style?: any;
+    isActionStep?: boolean; // hide this step until some action is taken to display it
 };
diff --git a/datahub-web-react/src/app/onboarding/utils.tsx b/datahub-web-react/src/app/onboarding/utils.tsx
index 84bb7a0913038..5237474e9f7e8 100644
--- a/datahub-web-react/src/app/onboarding/utils.tsx
+++ b/datahub-web-react/src/app/onboarding/utils.tsx
@@ -86,3 +86,8 @@ export function getStepsToRender(
         ),
     }));
 }
+
+// filter out action steps from the initial steps that should be shown
+export function getInitialAllowListIds() {
+    return OnboardingConfig.filter((config) => !config.isActionStep).map((config) => config.id as string);
+}
diff --git a/datahub-web-react/src/providers/EducationStepsProvider.tsx b/datahub-web-react/src/providers/EducationStepsProvider.tsx
index 28dc6b91e0e82..f254b21ea99a5 100644
--- a/datahub-web-react/src/providers/EducationStepsProvider.tsx
+++ b/datahub-web-react/src/providers/EducationStepsProvider.tsx
@@ -1,9 +1,8 @@
 import React, { useEffect, useState } from 'react';
-import { getStepIds } from '../app/onboarding/utils';
+import { getInitialAllowListIds, getStepIds } from '../app/onboarding/utils';
 import { useBatchGetStepStatesQuery } from '../graphql/step.generated';
 import { EducationStepsContext } from './EducationStepsContext';
 import { StepStateResult } from '../types.generated';
-import { CURRENT_ONBOARDING_IDS } from '../app/onboarding/OnboardingConfig';
 import { useUserContext } from '../app/context/useUserContext';
 
 export function EducationStepsProvider({ children }: { children: React.ReactNode }) {
@@ -13,7 +12,7 @@ export function EducationStepsProvider({ children }: { children: React.ReactNode
     const results = data?.batchGetStepStates.results;
     const [educationSteps, setEducationSteps] = useState<StepStateResult[] | null>(results || null);
     const [educationStepIdsAllowlist, setEducationStepIdsAllowlist] = useState<Set<string>>(
-        new Set(CURRENT_ONBOARDING_IDS),
+        new Set(getInitialAllowListIds()),
     );
 
     useEffect(() => {