Merge branch 'datahub-project:master' into master

treff7es authored Mar 25, 2024
2 parents e72908e + c8a3818 commit 1b07542
Showing 524 changed files with 10,148 additions and 4,871 deletions.
2 changes: 2 additions & 0 deletions .github/actions/ci-optimization/action.yml
@@ -70,6 +70,8 @@ runs:
- "metadata-jobs/**"
- "metadata-service/**"
- "metadata-utils/**"
- "metadata-operation-context/**"
- "datahub-graphql-core/**"
- "smoke-test/**"
- "docker/**"
kafka-setup:
2 changes: 2 additions & 0 deletions .github/workflows/build-and-test.yml
@@ -84,6 +84,8 @@ jobs:
-x :metadata-io:test \
-x :metadata-ingestion-modules:airflow-plugin:build \
-x :metadata-ingestion-modules:airflow-plugin:check \
-x :metadata-ingestion-modules:dagster-plugin:build \
-x :metadata-ingestion-modules:dagster-plugin:check \
-x :datahub-frontend:build \
-x :datahub-web-react:build \
--parallel
85 changes: 85 additions & 0 deletions .github/workflows/dagster-plugin.yml
@@ -0,0 +1,85 @@
name: Dagster Plugin
on:
  push:
    branches:
      - master
    paths:
      - ".github/workflows/dagster-plugin.yml"
      - "metadata-ingestion-modules/dagster-plugin/**"
      - "metadata-ingestion/**"
      - "metadata-models/**"
  pull_request:
    branches:
      - master
    paths:
      - ".github/**"
      - "metadata-ingestion-modules/dagster-plugin/**"
      - "metadata-ingestion/**"
      - "metadata-models/**"
  release:
    types: [published]

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  dagster-plugin:
    runs-on: ubuntu-latest
    env:
      SPARK_VERSION: 3.0.3
      DATAHUB_TELEMETRY_ENABLED: false
    strategy:
      matrix:
        python-version: ["3.8", "3.10"]
        include:
          - python-version: "3.8"
            extraPythonRequirement: "dagster>=1.3.3"
          - python-version: "3.10"
            extraPythonRequirement: "dagster>=1.3.3"
      fail-fast: false
    steps:
      - name: Set up JDK 17
        uses: actions/setup-java@v3
        with:
          distribution: "zulu"
          java-version: 17
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
          cache: "pip"
      - name: Install dependencies
        run: ./metadata-ingestion/scripts/install_deps.sh
      - name: Install dagster package and test (extras ${{ matrix.extraPythonRequirement }})
        run: ./gradlew -Pextra_pip_requirements='${{ matrix.extraPythonRequirement }}' :metadata-ingestion-modules:dagster-plugin:lint :metadata-ingestion-modules:dagster-plugin:testQuick
      - name: pip freeze show list installed
        if: always()
        run: source metadata-ingestion-modules/dagster-plugin/venv/bin/activate && pip freeze
      - uses: actions/upload-artifact@v3
        if: ${{ always() && matrix.python-version == '3.10' && matrix.extraPythonRequirement == 'dagster>=1.3.3' }}
        with:
          name: Test Results (dagster Plugin ${{ matrix.python-version}})
          path: |
            **/build/reports/tests/test/**
            **/build/test-results/test/**
            **/junit.*.xml
      - name: Upload coverage to Codecov
        if: always()
        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          directory: .
          fail_ci_if_error: false
          flags: dagster-${{ matrix.python-version }}-${{ matrix.extraPythonRequirement }}
          name: pytest-dagster
          verbose: true

  event-file:
    runs-on: ubuntu-latest
    steps:
      - name: Upload
        uses: actions/upload-artifact@v3
        with:
          name: Event File
          path: ${{ github.event_path }}
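For local debugging, the lint and quick-test step of this workflow can be reproduced from a DataHub checkout. A minimal sketch, assuming the repository's Gradle wrapper and a working Python toolchain are available; the extra_pip_requirements value mirrors the matrix entries above:

# Install the ingestion build dependencies, then lint and quick-test the Dagster plugin
./metadata-ingestion/scripts/install_deps.sh
./gradlew -Pextra_pip_requirements='dagster>=1.3.3' \
  :metadata-ingestion-modules:dagster-plugin:lint \
  :metadata-ingestion-modules:dagster-plugin:testQuick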
2 changes: 1 addition & 1 deletion .github/workflows/test-results.yml
@@ -2,7 +2,7 @@ name: Test Results

on:
workflow_run:
workflows: ["build & test", "metadata ingestion", "Airflow Plugin"]
workflows: ["build & test", "metadata ingestion", "Airflow Plugin", "Dagster Plugin"]
types:
- completed

32 changes: 32 additions & 0 deletions buildSrc/src/main/java/io/datahubproject/OpenApiEntities.java
@@ -460,6 +460,22 @@ private ObjectNode buildListEntityPath(Entity entity, Set<String> parameterDefin
ObjectNode postMethod = NODE_FACTORY.objectNode()
.put("summary", "Create " + upperFirst)
.put("operationId", String.format("create", upperFirst));
ArrayNode postParameters = NODE_FACTORY.arrayNode();
postMethod.set("parameters", postParameters);
postParameters.add(NODE_FACTORY.objectNode()
.put("in", "query")
.put("name", "createIfNotExists")
.put("description", "Create the aspect if it does not already exist.")
.set("schema", NODE_FACTORY.objectNode()
.put("type", "boolean")
.put("default", false)));
postParameters.add(NODE_FACTORY.objectNode()
.put("in", "query")
.put("name", "createEntityIfNotExists")
.put("description", "Create the entity ONLY if it does not already exist. Fails in case when the entity exists.")
.set("schema", NODE_FACTORY.objectNode()
.put("type", "boolean")
.put("default", false)));
postMethod.set("requestBody", NODE_FACTORY.objectNode()
.put("description", "Create " + entity.getName() + " entities.")
.put("required", true)
@@ -610,6 +626,22 @@ private ObjectNode buildSingleEntityAspectPath(Entity entity, String aspect) {
ObjectNode postMethod = NODE_FACTORY.objectNode()
.put("summary", String.format("Create aspect %s on %s ", aspect, upperFirstEntity))
.put("operationId", String.format("create%s", upperFirstAspect));
ArrayNode postParameters = NODE_FACTORY.arrayNode();
postMethod.set("parameters", postParameters);
postParameters.add(NODE_FACTORY.objectNode()
.put("in", "query")
.put("name", "createIfNotExists")
.put("description", "Create the aspect if it does not already exist.")
.set("schema", NODE_FACTORY.objectNode()
.put("type", "boolean")
.put("default", false)));
postParameters.add(NODE_FACTORY.objectNode()
.put("in", "query")
.put("name", "createEntityIfNotExists")
.put("description", "Create the entity if it does not already exist. Fails in case when the entity exists.")
.set("schema", NODE_FACTORY.objectNode()
.put("type", "boolean")
.put("default", false)));
postMethod.set("requestBody", NODE_FACTORY.objectNode()
.put("description", String.format("Create aspect %s on %s entity.", aspect, upperFirstEntity))
.put("required", true).set("content", NODE_FACTORY.objectNode()
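Both query parameters added above are plain booleans that default to false, so existing callers of the generated create endpoints are unaffected. The call below is an illustrative sketch only: the path, entity name, and request body are assumptions, since the concrete routes come from the generated OpenAPI spec and the target DataHub deployment.

# Hypothetical request: create the entity only if it does not already exist;
# per the parameter description, the request fails when the entity is already present.
curl -X POST 'http://localhost:8080/openapi/v2/entity/dataset?createEntityIfNotExists=true' \
  -H 'Content-Type: application/json' \
  -d '[{"urn": "urn:li:dataset:(urn:li:dataPlatform:hive,SampleTable,PROD)"}]'

With both flags left at their default of false, the conditional behavior is disabled.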
2 changes: 1 addition & 1 deletion datahub-frontend/app/auth/AuthModule.java
@@ -172,7 +172,7 @@ protected OperationContext provideOperationContext(final Authentication systemAu
.authentication(systemAuthentication)
.build();
OperationContextConfig systemConfig = OperationContextConfig.builder()
.searchAuthorizationConfiguration(configurationProvider.getAuthorization().getSearch())
.viewAuthorizationConfiguration(configurationProvider.getAuthorization().getView())
.allowSystemAuthentication(true)
.build();

1 change: 1 addition & 0 deletions datahub-frontend/conf/logback.xml
@@ -13,6 +13,7 @@
<filter class="com.linkedin.metadata.utils.log.LogMessageFilter">
<excluded>Unable to renew the session. The session store may not support this feature</excluded>
<excluded>Preferred JWS algorithm: null not available. Using all metadata algorithms:</excluded>
<excluded>Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props</excluded>
</filter>
</appender>

1 change: 1 addition & 0 deletions datahub-frontend/run/logback.xml
@@ -13,6 +13,7 @@
<filter class="com.linkedin.metadata.utils.log.LogMessageFilter">
<excluded>Unable to renew the session. The session store may not support this feature</excluded>
<excluded>Preferred JWS algorithm: null not available. Using all metadata algorithms:</excluded>
<excluded>Config does not exist: file:///etc/datahub/plugins/frontend/auth/user.props</excluded>
</filter>
</appender>

2 changes: 1 addition & 1 deletion datahub-graphql-core/build.gradle
@@ -5,7 +5,7 @@ plugins {


dependencies {
implementation project(':metadata-service:restli-client')
implementation project(':metadata-service:restli-client-api')
implementation project(':metadata-service:auth-impl')
implementation project(':metadata-service:auth-config')
implementation project(':metadata-service:configuration')
@@ -349,14 +349,12 @@
import com.linkedin.metadata.query.filter.SortCriterion;
import com.linkedin.metadata.query.filter.SortOrder;
import com.linkedin.metadata.recommendation.RecommendationsService;
import com.linkedin.metadata.secret.SecretService;
import com.linkedin.metadata.service.DataProductService;
import com.linkedin.metadata.service.ERModelRelationshipService;
import com.linkedin.metadata.service.FormService;
import com.linkedin.metadata.service.LineageService;
import com.linkedin.metadata.service.OwnershipTypeService;
import com.linkedin.metadata.service.QueryService;
import com.linkedin.metadata.service.RestrictedService;
import com.linkedin.metadata.service.SettingsService;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.metadata.timeline.TimelineService;
@@ -368,6 +366,8 @@
import graphql.schema.DataFetchingEnvironment;
import graphql.schema.StaticDataFetcher;
import graphql.schema.idl.RuntimeWiring;
import io.datahubproject.metadata.services.RestrictedService;
import io.datahubproject.metadata.services.SecretService;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
@@ -1023,13 +1023,14 @@ private DataFetcher getEntitiesResolver() {
return new BatchGetEntitiesResolver(
entityTypes,
(env) -> {
final QueryContext context = env.getContext();
List<String> urns = env.getArgument(URNS_FIELD_NAME);
return urns.stream()
.map(UrnUtils::getUrn)
.map(
(urn) -> {
try {
Urn entityUrn = Urn.createFromString(urn);
return UrnToEntityMapper.map(entityUrn);
return UrnToEntityMapper.map(context, urn);
} catch (Exception e) {
throw new RuntimeException("Failed to get entity", e);
}
@@ -1043,8 +1043,9 @@ private DataFetcher getEntityResolver() {
entityTypes,
(env) -> {
try {
final QueryContext context = env.getContext();
Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME));
return UrnToEntityMapper.map(urn);
return UrnToEntityMapper.map(context, urn);
} catch (Exception e) {
throw new RuntimeException("Failed to get entity", e);
}
@@ -1183,9 +1185,12 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
new DeleteGlossaryEntityResolver(this.entityClient, this.entityService))
.dataFetcher(
"updateName", new UpdateNameResolver(this.entityService, this.entityClient))
.dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService))
.dataFetcher(
"removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService))
"addRelatedTerms",
new AddRelatedTermsResolver(this.entityService, this.entityClient))
.dataFetcher(
"removeRelatedTerms",
new RemoveRelatedTermsResolver(this.entityService, this.entityClient))
.dataFetcher(
"createNativeUserResetToken",
new CreateNativeUserResetTokenResolver(this.nativeUserService))
@@ -1726,7 +1731,8 @@ private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) {
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
.dataFetcher(
"aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)));
"aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))
.dataFetcher("exists", new EntityExistsResolver(entityService)));
builder.type(
"CorpUserInfo",
typeWiring ->
@@ -2183,7 +2189,12 @@ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) {
"dataFlow",
new LoadableTypeResolver<>(
dataFlowType,
(env) -> ((DataJob) env.getSource()).getDataFlow().getUrn()))
(env) -> {
final DataJob dataJob = env.getSource();
return dataJob.getDataFlow() != null
? dataJob.getDataFlow().getUrn()
: null;
}))
.dataFetcher(
"dataPlatformInstance",
new LoadableTypeResolver<>(
@@ -23,20 +23,20 @@
import com.linkedin.metadata.graph.SiblingGraphService;
import com.linkedin.metadata.models.registry.EntityRegistry;
import com.linkedin.metadata.recommendation.RecommendationsService;
import com.linkedin.metadata.secret.SecretService;
import com.linkedin.metadata.service.DataProductService;
import com.linkedin.metadata.service.ERModelRelationshipService;
import com.linkedin.metadata.service.FormService;
import com.linkedin.metadata.service.LineageService;
import com.linkedin.metadata.service.OwnershipTypeService;
import com.linkedin.metadata.service.QueryService;
import com.linkedin.metadata.service.RestrictedService;
import com.linkedin.metadata.service.SettingsService;
import com.linkedin.metadata.service.ViewService;
import com.linkedin.metadata.timeline.TimelineService;
import com.linkedin.metadata.timeseries.TimeseriesAspectService;
import com.linkedin.metadata.version.GitVersion;
import com.linkedin.usage.UsageClient;
import io.datahubproject.metadata.services.RestrictedService;
import io.datahubproject.metadata.services.SecretService;
import lombok.Data;

@Data
@@ -220,7 +220,8 @@ private List<BarSegment> extractBarSegmentsFromAggregations(
.collect(Collectors.toList());
}

public Row buildRow(String groupByValue, Function<String, Cell> groupByValueToCell, int count) {
public static Row buildRow(
String groupByValue, Function<String, Cell> groupByValueToCell, int count) {
List<String> values = ImmutableList.of(groupByValue, String.valueOf(count));
List<Cell> cells =
ImmutableList.of(
@@ -49,7 +49,7 @@ public static Cell buildCellWithEntityLandingPage(String urn) {
Cell result = new Cell();
result.setValue(urn);
try {
Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn));
Entity entity = UrnToEntityMapper.map(null, Urn.createFromString(urn));
result.setEntity(entity);
result.setLinkParams(
LinkParams.builder()