Merge branch 'datahub-project:master' into master
anshbansal authored Nov 26, 2024
2 parents cc809a6 + 9f9a8b1 commit 1aa5ba4
Showing 18 changed files with 275 additions and 50 deletions.
@@ -63,6 +63,7 @@
import com.linkedin.datahub.graphql.generated.Domain;
import com.linkedin.datahub.graphql.generated.ERModelRelationship;
import com.linkedin.datahub.graphql.generated.ERModelRelationshipProperties;
import com.linkedin.datahub.graphql.generated.Entity;
import com.linkedin.datahub.graphql.generated.EntityPath;
import com.linkedin.datahub.graphql.generated.EntityRelationship;
import com.linkedin.datahub.graphql.generated.EntityRelationshipLegacy;
@@ -312,6 +313,7 @@
import com.linkedin.datahub.graphql.resolvers.type.HyperParameterValueTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.PlatformSchemaUnionTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.PropertyValueResolver;
import com.linkedin.datahub.graphql.resolvers.type.ResolvedActorResolver;
import com.linkedin.datahub.graphql.resolvers.type.ResultsTypeResolver;
import com.linkedin.datahub.graphql.resolvers.type.TimeSeriesAspectInterfaceTypeResolver;
import com.linkedin.datahub.graphql.resolvers.user.CreateNativeUserResetTokenResolver;
@@ -1730,12 +1732,22 @@ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) {
.type(
"InstitutionalMemoryMetadata",
typeWiring ->
typeWiring.dataFetcher(
"author",
new LoadableTypeResolver<>(
corpUserType,
(env) ->
((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())))
typeWiring
.dataFetcher(
"author",
new LoadableTypeResolver<>(
corpUserType,
(env) ->
((InstitutionalMemoryMetadata) env.getSource())
.getAuthor()
.getUrn()))
.dataFetcher(
"actor",
new EntityTypeResolver(
this.entityTypes,
(env) ->
(Entity)
((InstitutionalMemoryMetadata) env.getSource()).getActor())))
.type(
"DatasetStatsSummary",
typeWiring ->
@@ -2242,6 +2254,7 @@ private void configureTypeResolvers(final RuntimeWiring.Builder builder) {
"HyperParameterValueType",
typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver()))
.type("PropertyValue", typeWiring -> typeWiring.typeResolver(new PropertyValueResolver()))
.type("ResolvedActor", typeWiring -> typeWiring.typeResolver(new ResolvedActorResolver()))
.type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver()))
.type(
"TimeSeriesAspect",
@@ -0,0 +1,25 @@
package com.linkedin.datahub.graphql.resolvers.type;

import com.linkedin.datahub.graphql.generated.CorpGroup;
import com.linkedin.datahub.graphql.generated.CorpUser;
import graphql.TypeResolutionEnvironment;
import graphql.schema.GraphQLObjectType;
import graphql.schema.TypeResolver;

public class ResolvedActorResolver implements TypeResolver {

public static final String CORP_USER = "CorpUser";
public static final String CORP_GROUP = "CorpGroup";

@Override
public GraphQLObjectType getType(TypeResolutionEnvironment env) {
if (env.getObject() instanceof CorpUser) {
return env.getSchema().getObjectType(CORP_USER);
} else if (env.getObject() instanceof CorpGroup) {
return env.getSchema().getObjectType(CORP_GROUP);
} else {
throw new RuntimeException(
"Unrecognized object type provided to type resolver, Type:" + env.getObject().toString());
}
}
}
@@ -28,6 +28,7 @@ public InstitutionalMemoryMetadata apply(
result.setDescription(input.getDescription()); // deprecated field
result.setLabel(input.getDescription());
result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString()));
result.setActor(ResolvedActorMapper.map(input.getCreateStamp().getActor()));
result.setCreated(AuditStampMapper.map(context, input.getCreateStamp()));
result.setAssociatedUrn(entityUrn.toString());
return result;
@@ -0,0 +1,31 @@
package com.linkedin.datahub.graphql.types.common.mappers;

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.CorpGroup;
import com.linkedin.datahub.graphql.generated.CorpUser;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.generated.ResolvedActor;
import com.linkedin.metadata.Constants;
import javax.annotation.Nonnull;

public class ResolvedActorMapper {

public static final ResolvedActorMapper INSTANCE = new ResolvedActorMapper();

public static ResolvedActor map(@Nonnull final Urn actorUrn) {
return INSTANCE.apply(actorUrn);
}

public ResolvedActor apply(@Nonnull final Urn actorUrn) {
if (actorUrn.getEntityType().equals(Constants.CORP_GROUP_ENTITY_NAME)) {
CorpGroup partialGroup = new CorpGroup();
partialGroup.setUrn(actorUrn.toString());
partialGroup.setType(EntityType.CORP_GROUP);
return partialGroup;
}
CorpUser partialUser = new CorpUser();
partialUser.setUrn(actorUrn.toString());
partialUser.setType(EntityType.CORP_USER);
return (ResolvedActor) partialUser;
}
}
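
A minimal usage sketch of the new mapper (not part of the change), assuming the DataHub GraphQL module and its generated CorpUser/CorpGroup classes are on the classpath and that those classes implement the generated ResolvedActor interface, as the cast in the mapper implies. The URNs below are illustrative.

import com.linkedin.common.urn.Urn;
import com.linkedin.datahub.graphql.generated.CorpGroup;
import com.linkedin.datahub.graphql.generated.CorpUser;
import com.linkedin.datahub.graphql.generated.ResolvedActor;
import com.linkedin.datahub.graphql.types.common.mappers.ResolvedActorMapper;

public class ResolvedActorMapperSketch {
  public static void main(String[] args) throws Exception {
    // A corpGroup URN maps to a partial CorpGroup; any other actor URN falls through to CorpUser.
    ResolvedActor group = ResolvedActorMapper.map(Urn.createFromString("urn:li:corpGroup:engineering"));
    ResolvedActor user = ResolvedActorMapper.map(Urn.createFromString("urn:li:corpuser:datahub"));

    System.out.println(group instanceof CorpGroup); // expected: true
    System.out.println(user instanceof CorpUser);   // expected: true
  }
}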
10 changes: 9 additions & 1 deletion datahub-graphql-core/src/main/resources/entity.graphql
@@ -3005,8 +3005,14 @@ type InstitutionalMemoryMetadata {

"""
The author of this metadata
Deprecated! Use actor instead for users or groups.
"""
author: CorpUser!
author: CorpUser! @deprecated(reason: "Use `actor`")

"""
The author of this metadata
"""
actor: ResolvedActor!

"""
An AuditStamp corresponding to the creation of this resource
@@ -3834,6 +3840,8 @@ enum CorpUserStatus {
ACTIVE
}

union ResolvedActor = CorpUser | CorpGroup

"""
A DataHub User entity, which represents a Person on the Metadata Entity Graph
"""
6 changes: 6 additions & 0 deletions datahub-web-react/src/Mocks.tsx
@@ -566,6 +566,12 @@ export const dataset3 = {
username: 'datahub',
type: EntityType.CorpUser,
},
actor: {
__typename: 'CorpUser',
urn: 'urn:li:corpuser:datahub',
username: 'datahub',
type: EntityType.CorpUser,
},
description: 'This only points to Google',
label: 'This only points to Google',
created: {
@@ -29,7 +29,7 @@ export default function LinkButton({ link }: Props) {
href={link.url}
target="_blank"
rel="noreferrer"
key={`${link.label}-${link.url}-${link.author}`}
key={`${link.label}-${link.url}-${link.actor.urn}`}
>
<LinkOutlined />
{link.description || link.label}
@@ -3,7 +3,7 @@ import { Link } from 'react-router-dom';
import styled from 'styled-components/macro';
import { message, Button, List, Typography, Modal, Form, Input } from 'antd';
import { LinkOutlined, DeleteOutlined, EditOutlined } from '@ant-design/icons';
import { EntityType, InstitutionalMemoryMetadata } from '../../../../../../types.generated';
import { InstitutionalMemoryMetadata } from '../../../../../../types.generated';
import { useEntityData, useMutationUrn } from '../../../EntityContext';
import { useEntityRegistry } from '../../../../../useEntityRegistry';
import { ANTD_GRAY } from '../../../constants';
@@ -182,10 +182,8 @@ export const LinkList = ({ refetch }: LinkListProps) => {
description={
<>
Added {formatDateString(link.created.time)} by{' '}
<Link
to={`${entityRegistry.getEntityUrl(EntityType.CorpUser, link.author.urn)}`}
>
{link.author.username}
<Link to={`${entityRegistry.getEntityUrl(link.actor.type, link.actor.urn)}`}>
{entityRegistry.getDisplayName(link.actor.type, link.actor)}
</Link>
</>
}
1 change: 1 addition & 0 deletions datahub-web-react/src/graphql-mock/mutationHelper.ts
@@ -99,6 +99,7 @@ export const updateEntityLink = ({ entity, institutionalMemory }: UpdateEntityLi
description: e.description as string,
label: e.description as string,
author: { urn: e.author, username: '', type: EntityType.CorpUser },
actor: { urn: e.author, username: '', type: EntityType.CorpUser },
created: { time: Date.now(), actor: getActor(), __typename: 'AuditStamp' },
associatedUrn: dataEntity.urn,
};
5 changes: 2 additions & 3 deletions datahub-web-react/src/graphql/domain.graphql
@@ -19,9 +19,8 @@ query getDomain($urn: String!) {
institutionalMemory {
elements {
url
author {
urn
username
actor {
...resolvedActorFields
}
description
created {
16 changes: 13 additions & 3 deletions datahub-web-react/src/graphql/fragments.graphql
@@ -202,12 +202,22 @@ fragment embedFields on Embed {
renderUrl
}

fragment resolvedActorFields on ResolvedActor {
... on CorpUser {
urn
...entityDisplayNameFields
}
... on CorpGroup {
urn
...entityDisplayNameFields
}
}

fragment institutionalMemoryFields on InstitutionalMemory {
elements {
url
author {
urn
username
actor {
...resolvedActorFields
}
description
created {
@@ -8,7 +8,6 @@

OL_SCHEME_TWEAKS = {
"sqlserver": "mssql",
"trino": "presto",
"awsathena": "athena",
}

@@ -95,6 +95,10 @@ def cleanup(config: BigQueryV2Config) -> None:
"Optionally enabled via `classification.enabled`",
supported=True,
)
@capability(
SourceCapability.PARTITION_SUPPORT,
"Enabled by default, partition keys and clustering keys are supported.",
)
class BigqueryV2Source(StatefulIngestionSourceBase, TestableSource):
def __init__(self, ctx: PipelineContext, config: BigQueryV2Config):
super().__init__(config, ctx)
@@ -643,8 +643,11 @@ def create(
cls, d: dict, default_schema_map: Optional[Dict[str, str]] = None
) -> "TableauUpstreamReference":
# Values directly from `table` object from Tableau
database = t_database = d.get(c.DATABASE, {}).get(c.NAME)
database_id = d.get(c.DATABASE, {}).get(c.ID)
database_dict = (
d.get(c.DATABASE) or {}
) # this sometimes is None, so we need the `or {}`
database = t_database = database_dict.get(c.NAME)
database_id = database_dict.get(c.ID)
schema = t_schema = d.get(c.SCHEMA)
table = t_table = d.get(c.NAME) or ""
t_full_name = d.get(c.FULL_NAME)
@@ -27,6 +27,7 @@
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
@@ -70,6 +71,7 @@ private static Stream<MCPItem> generatePatchRemove(
log.error("Unable to process data product properties for urn: {}", mclItem.getUrn());
return Stream.empty();
}
Map<String, List<GenericJsonPatch.PatchOp>> patchOpMap = new HashMap<>();
for (DataProductAssociation dataProductAssociation :
Optional.ofNullable(dataProductProperties.getAssets())
.orElse(new DataProductAssociationArray())) {
@@ -93,40 +95,45 @@
if (!result.getEntities().isEmpty()) {
for (RelatedEntities entity : result.getEntities()) {
if (!mclItem.getUrn().equals(UrnUtils.getUrn(entity.getSourceUrn()))) {
EntitySpec entitySpec =
retrieverContext
.getAspectRetriever()
.getEntityRegistry()
.getEntitySpec(DATA_PRODUCT_ENTITY_NAME);
GenericJsonPatch.PatchOp patchOp = new GenericJsonPatch.PatchOp();
patchOp.setOp(PatchOperationType.REMOVE.getValue());
patchOp.setPath(String.format("/assets/%s", entity.getDestinationUrn()));
mcpItems.add(
PatchItemImpl.builder()
.urn(UrnUtils.getUrn(entity.getSourceUrn()))
.entitySpec(
retrieverContext
.getAspectRetriever()
.getEntityRegistry()
.getEntitySpec(DATA_PRODUCT_ENTITY_NAME))
.aspectName(DATA_PRODUCT_PROPERTIES_ASPECT_NAME)
.aspectSpec(entitySpec.getAspectSpec(DATA_PRODUCT_PROPERTIES_ASPECT_NAME))
.patch(
GenericJsonPatch.builder()
.arrayPrimaryKeys(
Map.of(
DataProductPropertiesTemplate.ASSETS_FIELD_NAME,
List.of(DataProductPropertiesTemplate.KEY_FIELD_NAME)))
.patch(List.of(patchOp))
.build()
.getJsonPatch())
.auditStamp(mclItem.getAuditStamp())
.systemMetadata(mclItem.getSystemMetadata())
.build(retrieverContext.getAspectRetriever().getEntityRegistry()));
patchOpMap
.computeIfAbsent(entity.getSourceUrn(), urn -> new ArrayList<>())
.add(patchOp);
}
}
}
}
for (String urn : patchOpMap.keySet()) {
EntitySpec entitySpec =
retrieverContext
.getAspectRetriever()
.getEntityRegistry()
.getEntitySpec(DATA_PRODUCT_ENTITY_NAME);
mcpItems.add(
PatchItemImpl.builder()
.urn(UrnUtils.getUrn(urn))
.entitySpec(
retrieverContext
.getAspectRetriever()
.getEntityRegistry()
.getEntitySpec(DATA_PRODUCT_ENTITY_NAME))
.aspectName(DATA_PRODUCT_PROPERTIES_ASPECT_NAME)
.aspectSpec(entitySpec.getAspectSpec(DATA_PRODUCT_PROPERTIES_ASPECT_NAME))
.patch(
GenericJsonPatch.builder()
.arrayPrimaryKeys(
Map.of(
DataProductPropertiesTemplate.ASSETS_FIELD_NAME,
List.of(DataProductPropertiesTemplate.KEY_FIELD_NAME)))
.patch(patchOpMap.get(urn))
.build()
.getJsonPatch())
.auditStamp(mclItem.getAuditStamp())
.systemMetadata(mclItem.getSystemMetadata())
.build(retrieverContext.getAspectRetriever().getEntityRegistry()));
}
return mcpItems.stream();
}
return Stream.empty();
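
The hunk above replaces the old per-edge emission (one PatchItemImpl per related entity) with a HashMap keyed by source URN, so each data product now receives a single patch carrying all of its /assets remove operations. A self-contained sketch of that grouping pattern, using plain strings in place of DataHub's GenericJsonPatch.PatchOp and hypothetical URNs:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class PatchGroupingSketch {
  public static void main(String[] args) {
    // Illustrative (sourceUrn, destinationUrn) edges; real values come from the graph query above.
    List<String[]> edges =
        List.of(
            new String[] {"urn:li:dataProduct:p1", "urn:li:dataset:a"},
            new String[] {"urn:li:dataProduct:p1", "urn:li:dataset:b"},
            new String[] {"urn:li:dataProduct:p2", "urn:li:dataset:a"});

    // Group remove ops by source URN so each data product gets exactly one patch.
    Map<String, List<String>> patchOpMap = new HashMap<>();
    for (String[] edge : edges) {
      patchOpMap.computeIfAbsent(edge[0], urn -> new ArrayList<>()).add("remove /assets/" + edge[1]);
    }

    // One emitted patch per data product, carrying all of its remove ops.
    patchOpMap.forEach((urn, ops) -> System.out.println(urn + " -> " + ops));
  }
}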
@@ -36,6 +36,7 @@
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
@@ -425,7 +426,10 @@ public List<String> getBrowsePaths(
if (!sourceMap.containsKey(BROWSE_PATH)) {
return Collections.emptyList();
}
return (List<String>) sourceMap.get(BROWSE_PATH);
List<String> browsePaths =
((List<String>) sourceMap.get(BROWSE_PATH))
.stream().filter(Objects::nonNull).collect(Collectors.toList());
return browsePaths;
}

public BrowseResultV2 browseV2(
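
The getBrowsePaths change above drops null entries from the list read out of the search document before returning it to callers. A minimal standalone sketch of the same filtering, with a hypothetical source map and field name:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

public class BrowsePathFilterSketch {
  public static void main(String[] args) {
    // Hypothetical search-document source map; the key name here is illustrative.
    Map<String, Object> sourceMap = new HashMap<>();
    sourceMap.put("browsePaths", new ArrayList<>(Arrays.asList("/prod/kafka", null, "/prod/hive")));

    @SuppressWarnings("unchecked")
    List<String> raw = (List<String>) sourceMap.get("browsePaths");

    // Mirrors the change above: filter out null entries before handing the list to callers.
    List<String> browsePaths = raw.stream().filter(Objects::nonNull).collect(Collectors.toList());
    System.out.println(browsePaths); // [/prod/kafka, /prod/hive]
  }
}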