diff --git a/.github/workflows/airflow-plugin.yml b/.github/workflows/airflow-plugin.yml index d0c0f52781b9af..70816e5f093d13 100644 --- a/.github/workflows/airflow-plugin.yml +++ b/.github/workflows/airflow-plugin.yml @@ -32,6 +32,7 @@ jobs: strategy: matrix: include: + # Note: this should be kept in sync with tox.ini. - python-version: "3.8" extra_pip_requirements: "apache-airflow~=2.1.4" extra_pip_extras: plugin-v1 @@ -39,16 +40,21 @@ jobs: extra_pip_requirements: "apache-airflow~=2.2.4" extra_pip_extras: plugin-v1 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.4.0" + extra_pip_requirements: 'apache-airflow~=2.4.0 pluggy==1.0.0 "pendulum<3.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow~=2.6.0" + extra_pip_requirements: 'apache-airflow~=2.6.0 "pendulum<3.0"' extra_pip_extras: plugin-v2 - python-version: "3.10" - extra_pip_requirements: "apache-airflow>=2.7.0" + extra_pip_requirements: "apache-airflow>=2.7.0 pydantic==2.4.2" extra_pip_extras: plugin-v2 fail-fast: false steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 10c137a206531a..dab64cf2dca5e6 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -37,11 +37,11 @@ jobs: with: timezoneLinux: ${{ matrix.timezone }} - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/check-datahub-jars.yml b/.github/workflows/check-datahub-jars.yml index 8e507ea40fd963..46d97ffec88618 100644 --- a/.github/workflows/check-datahub-jars.yml +++ 
b/.github/workflows/check-datahub-jars.yml @@ -28,11 +28,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/docker-unified.yml b/.github/workflows/docker-unified.yml index 8bb82a0a0608cb..7cef38b1cd47ce 100644 --- a/.github/workflows/docker-unified.yml +++ b/.github/workflows/docker-unified.yml @@ -79,6 +79,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -135,6 +140,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -191,6 +201,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -247,6 +262,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -303,6 +323,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - name: Pre-build artifacts for docker image @@ -537,6 +562,11 @@ 
jobs: needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_slim_build] steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -618,6 +648,11 @@ jobs: needs_artifact_download: ${{ (steps.filter.outputs.datahub-ingestion-base == 'true' || steps.filter.outputs.datahub-ingestion == 'true') && needs.setup.outputs.publish != 'true' }} needs: [setup, datahub_ingestion_base_full_build] steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - name: Check out the repo uses: hsheth2/sane-checkout-action@v1 - uses: dorny/paths-filter@v2 @@ -696,7 +731,12 @@ jobs: strategy: fail-fast: false matrix: - test_strategy: ["no_cypress", "cypress_suite1", "cypress_rest"] + test_strategy: [ + "no_cypress_suite0", + "no_cypress_suite1", + "cypress_suite1", + "cypress_rest" + ] needs: [ setup, @@ -715,11 +755,11 @@ jobs: run: df -h . && docker images - name: Check out the repo uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" @@ -792,11 +832,6 @@ jobs: ACTIONS_CONFIG: "https://raw.githubusercontent.com/acryldata/datahub-actions/main/docker/config/executor.yaml" run: | ./smoke-test/run-quickstart.sh - - name: sleep 60s - run: | - # we are doing this because gms takes time to get ready - # and we don't have a better readiness check when bootstrap is done - sleep 60s - name: Disk Check run: df -h . 
&& docker images - name: Disable ES Disk Threshold @@ -876,13 +911,13 @@ jobs: ] steps: - uses: aws-actions/configure-aws-credentials@v1 - if: ${{ needs.setup.outputs.publish != 'false' }} + if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} with: aws-access-key-id: ${{ secrets.AWS_SQS_ACCESS_KEY_ID }} aws-secret-access-key: ${{ secrets.AWS_SQS_ACCESS_KEY }} aws-region: us-west-2 - uses: isbang/sqs-action@v0.2.0 - if: ${{ needs.setup.outputs.publish != 'false' }} + if: ${{ needs.setup.outputs.publish != 'false' && github.repository_owner == 'datahub-project' && needs.setup.outputs.repository_name == 'datahub' }} with: sqs-url: ${{ secrets.DATAHUB_HEAD_SYNC_QUEUE }} message: '{ "command": "git-sync", "args" : {"repoName": "${{ needs.setup.outputs.repository_name }}", "repoOrg": "${{ github.repository_owner }}", "repoBranch": "${{ needs.setup.outputs.branch_name }}", "repoShaShort": "${{ needs.setup.outputs.short_sha }}" }}' diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index c94282938120e4..29953b8b70d911 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -27,11 +27,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-ingestion.yml b/.github/workflows/metadata-ingestion.yml index ec6bd4141cc6fc..4e04fef3b3980b 100644 --- a/.github/workflows/metadata-ingestion.yml +++ b/.github/workflows/metadata-ingestion.yml @@ -44,6 +44,11 @@ jobs: - python-version: "3.10" fail-fast: false steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: 
actions/setup-python@v4 with: diff --git a/.github/workflows/metadata-io.yml b/.github/workflows/metadata-io.yml index 48f230ce14c8db..2188fcb07c77a6 100644 --- a/.github/workflows/metadata-io.yml +++ b/.github/workflows/metadata-io.yml @@ -29,11 +29,11 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v3 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/metadata-model.yml b/.github/workflows/metadata-model.yml index eb098a327e4cb5..d0112f1b14e7af 100644 --- a/.github/workflows/metadata-model.yml +++ b/.github/workflows/metadata-model.yml @@ -29,6 +29,11 @@ jobs: runs-on: ubuntu-latest needs: setup steps: + - name: Set up JDK 17 + uses: actions/setup-java@v3 + with: + distribution: "zulu" + java-version: 17 - uses: actions/checkout@v3 - uses: actions/setup-python@v4 with: diff --git a/.github/workflows/publish-datahub-jars.yml b/.github/workflows/publish-datahub-jars.yml index ec7985ef3b3d03..24d1c5436b3156 100644 --- a/.github/workflows/publish-datahub-jars.yml +++ b/.github/workflows/publish-datahub-jars.yml @@ -49,11 +49,11 @@ jobs: if: ${{ needs.check-secret.outputs.publish-enabled == 'true' }} steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/.github/workflows/spark-smoke-test.yml b/.github/workflows/spark-smoke-test.yml index 70b66d6452b266..60e183cce5179c 100644 --- a/.github/workflows/spark-smoke-test.yml +++ b/.github/workflows/spark-smoke-test.yml @@ -30,11 +30,11 @@ jobs: runs-on: ubuntu-latest steps: - uses: hsheth2/sane-checkout-action@v1 - - name: Set up JDK 11 + - name: Set up JDK 17 uses: actions/setup-java@v3 with: distribution: "zulu" - 
java-version: 11 + java-version: 17 - uses: actions/setup-python@v4 with: python-version: "3.10" diff --git a/build.gradle b/build.gradle index c1278a6dab1a04..bb01a15a7db8d6 100644 --- a/build.gradle +++ b/build.gradle @@ -1,26 +1,30 @@ buildscript { + ext.jdkVersion = 17 + ext.javaClassVersion = 11 + ext.junitJupiterVersion = '5.6.1' // Releases: https://github.com/linkedin/rest.li/blob/master/CHANGELOG.md - ext.pegasusVersion = '29.46.8' + ext.pegasusVersion = '29.48.4' ext.mavenVersion = '3.6.3' ext.springVersion = '5.3.29' ext.springBootVersion = '2.7.14' ext.openTelemetryVersion = '1.18.0' - ext.neo4jVersion = '4.4.9' - ext.neo4jTestVersion = '4.4.25' - ext.neo4jApocVersion = '4.4.0.20:all' + ext.neo4jVersion = '5.14.0' + ext.neo4jTestVersion = '5.14.0' + ext.neo4jApocVersion = '5.14.0' ext.testContainersVersion = '1.17.4' ext.elasticsearchVersion = '2.9.0' // ES 7.10, Opensearch 1.x, 2.x - ext.jacksonVersion = '2.15.2' + ext.jacksonVersion = '2.15.3' ext.jettyVersion = '9.4.46.v20220331' ext.playVersion = '2.8.18' ext.log4jVersion = '2.19.0' ext.slf4jVersion = '1.7.36' - ext.logbackClassic = '1.2.12' + ext.logbackClassic = '1.2.13' ext.hadoop3Version = '3.3.5' ext.kafkaVersion = '2.3.0' ext.hazelcastVersion = '5.3.6' ext.ebeanVersion = '12.16.1' + ext.googleJavaFormatVersion = '1.18.1' ext.docker_registry = 'linkedin' @@ -28,20 +32,22 @@ buildscript { buildscript.repositories.addAll(project.repositories) dependencies { classpath 'com.linkedin.pegasus:gradle-plugins:' + pegasusVersion - classpath 'com.github.node-gradle:gradle-node-plugin:2.2.4' + classpath 'com.github.node-gradle:gradle-node-plugin:7.0.1' classpath 'io.acryl.gradle.plugin:gradle-avro-plugin:0.2.0' classpath 'org.springframework.boot:spring-boot-gradle-plugin:' + springBootVersion classpath "io.codearte.gradle.nexus:gradle-nexus-staging-plugin:0.30.0" classpath "com.palantir.gradle.gitversion:gradle-git-version:3.0.0" classpath "org.gradle.playframework:gradle-playframework:0.14" - classpath 
"gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.1" + classpath "gradle.plugin.org.hidetake:gradle-swagger-generator-plugin:2.19.2" } } plugins { - id 'com.gorylenko.gradle-git-properties' version '2.4.0-rc2' - id 'com.github.johnrengelman.shadow' version '6.1.0' + id 'com.gorylenko.gradle-git-properties' version '2.4.1' + id 'com.github.johnrengelman.shadow' version '8.1.1' apply false id 'com.palantir.docker' version '0.35.0' apply false + id 'com.avast.gradle.docker-compose' version '0.17.5' + id "com.diffplug.spotless" version "6.23.3" // https://blog.ltgt.net/javax-jakarta-mess-and-gradle-solution/ // TODO id "org.gradlex.java-ecosystem-capabilities" version "1.0" } @@ -147,19 +153,20 @@ project.ext.externalDependency = [ 'log4jApi': "org.apache.logging.log4j:log4j-api:$log4jVersion", 'log4j12Api': "org.slf4j:log4j-over-slf4j:$slf4jVersion", 'log4j2Api': "org.apache.logging.log4j:log4j-to-slf4j:$log4jVersion", - 'lombok': 'org.projectlombok:lombok:1.18.16', + 'lombok': 'org.projectlombok:lombok:1.18.30', 'mariadbConnector': 'org.mariadb.jdbc:mariadb-java-client:2.6.0', 'mavenArtifact': "org.apache.maven:maven-artifact:$mavenVersion", 'mixpanel': 'com.mixpanel:mixpanel-java:1.4.4', - 'mockito': 'org.mockito:mockito-core:3.0.0', - 'mockitoInline': 'org.mockito:mockito-inline:3.0.0', + 'mockito': 'org.mockito:mockito-core:4.11.0', + 'mockitoInline': 'org.mockito:mockito-inline:4.11.0', 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jTestVersion, 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, 'neo4jTestJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jTestVersion, - 'neo4jApoc': 'org.neo4j.procedure:apoc:' + neo4jApocVersion, + 'neo4jApocCore': 'org.neo4j.procedure:apoc-core:' + neo4jApocVersion, + 'neo4jApocCommon': 
'org.neo4j.procedure:apoc-common:' + neo4jApocVersion, 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', @@ -188,8 +195,8 @@ project.ext.externalDependency = [ 'servletApi': 'javax.servlet:javax.servlet-api:3.1.0', 'shiroCore': 'org.apache.shiro:shiro-core:1.11.0', 'snakeYaml': 'org.yaml:snakeyaml:2.0', - 'sparkSql' : 'org.apache.spark:spark-sql_2.11:2.4.8', - 'sparkHive' : 'org.apache.spark:spark-hive_2.11:2.4.8', + 'sparkSql' : 'org.apache.spark:spark-sql_2.12:3.0.3', + 'sparkHive' : 'org.apache.spark:spark-hive_2.12:3.0.3', 'springBeans': "org.springframework:spring-beans:$springVersion", 'springContext': "org.springframework:spring-context:$springVersion", 'springCore': "org.springframework:spring-core:$springVersion", @@ -208,7 +215,6 @@ project.ext.externalDependency = [ 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.2.15', 'swaggerCli': 'io.swagger.codegen.v3:swagger-codegen-cli:3.0.46', - 'testngJava8': 'org.testng:testng:7.5.1', 'testng': 'org.testng:testng:7.8.0', 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, @@ -224,15 +230,69 @@ project.ext.externalDependency = [ 'charle': 'com.charleskorn.kaml:kaml:0.53.0', 'common': 'commons-io:commons-io:2.7', 'jline':'jline:jline:1.4.1', - 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0' - + 'jetbrains':' org.jetbrains.kotlin:kotlin-stdlib:1.6.0', + 'annotationApi': 'javax.annotation:javax.annotation-api:1.3.2' ] allprojects { apply plugin: 'idea' apply plugin: 'eclipse' - apply plugin: 'checkstyle' // apply plugin: 'org.gradlex.java-ecosystem-capabilities' + + 
tasks.withType(Test).configureEach { + // https://docs.gradle.org/current/userguide/performance.html + maxParallelForks = Runtime.runtime.availableProcessors().intdiv(2) ?: 1 + + if (project.configurations.getByName("testImplementation").getDependencies() + .any{ it.getName().contains("testng") }) { + useTestNG() + } + } + + if (project.plugins.hasPlugin('java') + || project.plugins.hasPlugin('java-library') + || project.plugins.hasPlugin('application') + || project.plugins.hasPlugin('pegasus')) { + + java { + toolchain { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + compileJava { + options.release = javaClassVersion + } + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + // not duplicated, need to set this outside and inside afterEvaluate + afterEvaluate { + compileJava { + options.release = javaClassVersion + } + tasks.withType(JavaCompile).configureEach { + javaCompiler = javaToolchains.compilerFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + + tasks.withType(JavaExec).configureEach { + javaLauncher = javaToolchains.launcherFor { + languageVersion = JavaLanguageVersion.of(jdkVersion) + } + } + } + } } configure(subprojects.findAll {! 
it.name.startsWith('spark-lineage')}) { @@ -253,6 +313,7 @@ subprojects { apply plugin: 'maven-publish' apply plugin: 'com.gorylenko.gradle-git-properties' + apply plugin: 'com.diffplug.spotless' gitProperties { keys = ['git.commit.id','git.commit.id.describe','git.commit.time'] @@ -263,9 +324,11 @@ subprojects { failOnNoGitDirectory = false } - plugins.withType(JavaPlugin) { + plugins.withType(JavaPlugin).configureEach { dependencies { + implementation externalDependency.annotationApi constraints { + implementation("com.google.googlejavaformat:google-java-format:$googleJavaFormatVersion") implementation('io.netty:netty-all:4.1.100.Final') implementation('org.apache.commons:commons-compress:1.21') implementation('org.apache.velocity:velocity-engine-core:2.3') @@ -275,30 +338,43 @@ subprojects { } } - checkstyle { - configDirectory = file("${project.rootDir}/gradle/checkstyle") - sourceSets = [ getProject().sourceSets.main, getProject().sourceSets.test ] - toolVersion = "8.0" - maxWarnings = 0 - ignoreFailures = false + spotless { + java { + googleJavaFormat() + target project.fileTree(project.projectDir) { + include 'src/**/*.java' + exclude 'src/**/resources/' + exclude 'src/**/generated/' + exclude 'src/**/mainGeneratedDataTemplate/' + exclude 'src/**/mainGeneratedRest/' + exclude 'src/renamed/avro/' + exclude 'src/test/sample-test-plugins/' + } + } } - } - tasks.withType(JavaCompile).configureEach { - javaCompiler = javaToolchains.compilerFor { - languageVersion = JavaLanguageVersion.of(11) - } - } - tasks.withType(Test).configureEach { - javaLauncher = javaToolchains.launcherFor { - languageVersion = JavaLanguageVersion.of(11) + if (project.plugins.hasPlugin('pegasus')) { + dependencies { + dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 + restClientCompile spec.product.pegasus.restliClient + } } - // https://docs.gradle.org/current/userguide/performance.html - maxParallelForks = 
Runtime.runtime.availableProcessors().intdiv(2) ?: 1 - if (project.configurations.getByName("testImplementation").getDependencies() - .any{ it.getName().contains("testng") }) { - useTestNG() + afterEvaluate { + def spotlessJavaTask = tasks.findByName('spotlessJava') + def processTask = tasks.findByName('processResources') + if (processTask != null) { + spotlessJavaTask.dependsOn processTask + } + def compileJavaTask = tasks.findByName('compileJava') + if (compileJavaTask != null) { + spotlessJavaTask.dependsOn compileJavaTask + } + // TODO - Do not run this in CI. How? + // tasks.withType(JavaCompile) { + // finalizedBy(tasks.findByName('spotlessApply')) + // } } } @@ -306,6 +382,7 @@ subprojects { if (project.plugins.hasPlugin('pegasus')) { dependencies { dataTemplateCompile spec.product.pegasus.data + dataTemplateCompile externalDependency.annotationApi // support > jdk8 restClientCompile spec.product.pegasus.restliClient } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 1f9d30d520171b..0c2d91e1f7ac1b 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -1,9 +1,11 @@ -apply plugin: 'java' - buildscript { apply from: '../repositories.gradle' } +plugins { + id 'java' +} + dependencies { /** * Forked version of abandoned repository: https://github.com/fge/json-schema-avro @@ -21,6 +23,9 @@ dependencies { implementation 'com.fasterxml.jackson.dataformat:jackson-dataformat-yaml:2.13.5' implementation 'commons-io:commons-io:2.11.0' - compileOnly 'org.projectlombok:lombok:1.18.14' - annotationProcessor 'org.projectlombok:lombok:1.18.14' + compileOnly 'org.projectlombok:lombok:1.18.30' + annotationProcessor 'org.projectlombok:lombok:1.18.30' + + // pegasus dependency, overrides for tasks + implementation 'com.linkedin.pegasus:gradle-plugins:29.48.4' } \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java new file 
mode 100644 index 00000000000000..2460abcad6f9e9 --- /dev/null +++ b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/PegasusPlugin.java @@ -0,0 +1,2444 @@ +/* + * Copyright (c) 2019 LinkedIn Corp. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package com.linkedin.pegasus.gradle; + +import com.linkedin.pegasus.gradle.PegasusOptions.IdlOptions; +import com.linkedin.pegasus.gradle.internal.CompatibilityLogChecker; +import com.linkedin.pegasus.gradle.tasks.ChangedFileReportTask; +import com.linkedin.pegasus.gradle.tasks.CheckIdlTask; +import com.linkedin.pegasus.gradle.tasks.CheckPegasusSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.CheckRestModelTask; +import com.linkedin.pegasus.gradle.tasks.CheckSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.GenerateAvroSchemaTask; +import com.linkedin.pegasus.gradle.tasks.GenerateDataTemplateTask; +import com.linkedin.pegasus.gradle.tasks.GeneratePegasusSnapshotTask; +import com.linkedin.pegasus.gradle.tasks.GenerateRestClientTask; +import com.linkedin.pegasus.gradle.tasks.GenerateRestModelTask; +import com.linkedin.pegasus.gradle.tasks.PublishRestModelTask; +import com.linkedin.pegasus.gradle.tasks.TranslateSchemasTask; +import com.linkedin.pegasus.gradle.tasks.ValidateExtensionSchemaTask; +import com.linkedin.pegasus.gradle.tasks.ValidateSchemaAnnotationTask; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.lang.reflect.Method; +import 
java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Properties; +import java.util.Set; +import java.util.TreeSet; +import java.util.function.Function; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import org.gradle.api.Action; +import org.gradle.api.GradleException; +import org.gradle.api.Plugin; +import org.gradle.api.Project; +import org.gradle.api.Task; +import org.gradle.api.artifacts.Configuration; +import org.gradle.api.artifacts.ConfigurationContainer; +import org.gradle.api.file.FileCollection; +import org.gradle.api.plugins.JavaBasePlugin; +import org.gradle.api.plugins.JavaPlugin; +import org.gradle.api.plugins.JavaPluginConvention; +import org.gradle.api.plugins.JavaPluginExtension; +import org.gradle.api.publish.PublishingExtension; +import org.gradle.api.publish.ivy.IvyPublication; +import org.gradle.api.publish.ivy.plugins.IvyPublishPlugin; +import org.gradle.api.tasks.Copy; +import org.gradle.api.tasks.Delete; +import org.gradle.api.tasks.SourceSet; +import org.gradle.api.tasks.SourceSetContainer; +import org.gradle.api.tasks.Sync; +import org.gradle.api.tasks.TaskProvider; +import org.gradle.api.tasks.bundling.Jar; +import org.gradle.api.tasks.compile.JavaCompile; +import org.gradle.api.tasks.javadoc.Javadoc; +import org.gradle.language.base.plugins.LifecycleBasePlugin; +import org.gradle.language.jvm.tasks.ProcessResources; +import org.gradle.plugins.ide.eclipse.EclipsePlugin; +import org.gradle.plugins.ide.eclipse.model.EclipseModel; +import org.gradle.plugins.ide.idea.IdeaPlugin; +import org.gradle.plugins.ide.idea.model.IdeaModule; +import org.gradle.util.GradleVersion; + + +/** + * Pegasus code generation plugin. 
+ * The supported project layout for this plugin is as follows: + * + *
+ *   --- api/
+ *   |   --- build.gradle
+ *   |   --- src/
+ *   |       --- <sourceSet>/
+ *   |       |   --- idl/
+ *   |       |   |   --- <published idl (.restspec.json) files>
+ *   |       |   --- java/
+ *   |       |   |   --- <packageName>/
+ *   |       |   |       --- <common java files>
+ *   |       |   --- pegasus/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <data schema (.pdsc) files>
+ *   |       --- <sourceSet>GeneratedDataTemplate/
+ *   |       |   --- java/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <data template source files generated from data schema (.pdsc) files>
+ *   |       --- <sourceSet>GeneratedAvroSchema/
+ *   |       |   --- avro/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <avsc avro schema files (.avsc) generated from pegasus schema files>
+ *   |       --- <sourceSet>GeneratedRest/
+ *   |           --- java/
+ *   |               --- <packageName>/
+ *   |                   --- <rest client source (.java) files generated from published idl>
+ *   --- impl/
+ *   |   --- build.gradle
+ *   |   --- src/
+ *   |       --- <sourceSet>/
+ *   |       |   --- java/
+ *   |       |       --- <packageName>/
+ *   |       |           --- <resource class source (.java) files>
+ *   |       --- <sourceSet>GeneratedRest/
+ *   |           --- idl/
+ *   |               --- <generated idl (.restspec.json) files>
+ *   --- <other projects>/
+ * 
+ * + *

Performs the following functions:

+ * + *

Generate data model and data template jars for each source set.

+ * + *

Overview:

+ * + *

+ * In the api project, the plugin generates the data template source (.java) files from the + * data schema (.pdsc) files, and furthermore compiles the source files and packages them + * to jar files. Details of jar contents will be explained in following paragraphs. + * In general, data schema files should exist only in api projects. + *

+ * + *

+ * Configure the server and client implementation projects to depend on the + * api project's dataTemplate configuration to get access to the generated data templates + * from within these projects. This allows api classes to be built first so that implementation + * projects can consume them. We recommend this structure to avoid circular dependencies + * (directly or indirectly) among implementation projects. + *

+ * + *

Detail:

+ * + *

+ * Generates data template source (.java) files from data schema (.pdsc) files, + * compiles the data template source (.java) files into class (.class) files, + * creates a data model jar file and a data template jar file. + * The data model jar file contains the source data schema (.pdsc) files. + * The data template jar file contains both the source data schema (.pdsc) files + * and the generated data template class (.class) files. + *

+ * + *

+ * In the data template generation phase, the plugin creates a new target source set + * for the generated files. The new target source set's name is the input source set name's + * suffixed with "GeneratedDataTemplate", e.g. "mainGeneratedDataTemplate". + * The plugin invokes PegasusDataTemplateGenerator to generate data template source (.java) files + * for all data schema (.pdsc) files present in the input source set's pegasus + * directory, e.g. "src/main/pegasus". The generated data template source (.java) files + * will be in the new target source set's java source directory, e.g. + * "src/mainGeneratedDataTemplate/java". In addition to + * the data schema (.pdsc) files in the pegasus directory, the dataModel configuration + * specifies resolver path for the PegasusDataTemplateGenerator. The resolver path + * provides the data schemas and previously generated data template classes that + * may be referenced by the input source set's data schemas. In most cases, the dataModel + * configuration should contain data template jars. + *

+ * + *

+ * The next phase is the data template compilation phase, the plugin compiles the generated + * data template source (.java) files into class files. The dataTemplateCompile configuration + * specifies the pegasus jars needed to compile these classes. The compileClasspath of the + * target source set is a composite of the dataModel configuration which includes the data template + * classes that were previously generated and included in the dependent data template jars, + * and the dataTemplateCompile configuration. + * This configuration should specify a dependency on the Pegasus data jar. + *

+ * + *

+ * The following phase is creating the the data model jar and the data template jar. + * This plugin creates the data model jar that includes the contents of the + * input source set's pegasus directory, and sets the jar file's classification to + * "data-model". Hence, the resulting jar file's name should end with "-data-model.jar". + * It adds the data model jar as an artifact to the dataModel configuration. + * This jar file should only contain data schema (.pdsc) files. + *

+ * + *

+ * This plugin also create the data template jar that includes the contents of the input + * source set's pegasus directory and the java class output directory of the + * target source set. It sets the jar file's classification to "data-template". + * Hence, the resulting jar file's name should end with "-data-template.jar". + * It adds the data template jar file as an artifact to the dataTemplate configuration. + * This jar file contains both data schema (.pdsc) files and generated data template + * class (.class) files. + *

+ * + *

+ * This plugin will ensure that data template source files are generated before + * compiling the input source set and before the idea and eclipse tasks. It + * also adds the generated classes to the compileClasspath of the input source set. + *

+ * + *

+ * The configurations that apply to generating the data model and data template jars + * are as follow: + *

+ *

+ * + *

Performs the following functions:

+ * + *

Generate avro schema jars for each source set.

+ * + *

Overview:

+ * + *

+ * In the api project, the task 'generateAvroSchema' generates the avro schema (.avsc) + * files from pegasus schema (.pdsc) files. In general, data schema files should exist + * only in api projects. + *

+ * + *

+ * Configure the server and client implementation projects to depend on the + * api project's avroSchema configuration to get access to the generated avro schemas + * from within these projects. + *

+ * + *

+ * This plugin also create the avro schema jar that includes the contents of the input + * source set's avro directory and the avsc schema files. + * The resulting jar file's name should end with "-avro-schema.jar". + *

+ * + *

Generate rest model and rest client jars for each source set.

+ * + *

Overview:

+ * + *

+ * In the api project, generates rest client source (.java) files from the idl, + * compiles the rest client source (.java) files to rest client class (.class) files + * and puts them in jar files. In general, the api project should be the only place that + * contains the publishable idl files. If the published idl changes an existing idl + * in the api project, the plugin will emit a message indicating this has occurred and + * suggest that the entire project be rebuilt if it is desirable for clients of the + * idl to pick up the newly published changes. + *

+ * + *

+ * In the impl project, generates the idl (.restspec.json) files from the input + * source set's resource class files, then compares them against the existing idl + * files in the api project for compatibility checking. If incompatible changes are + * found, the build fails (unless certain flag is specified, see below). If the + * generated idl passes compatibility checks (see compatibility check levels below), + * publishes the generated idl (.restspec.json) to the api project. + *

+ * + *

Detail:

+ * + *

rest client generation phase: in api project

+ * + *

+ * In this phase, the rest client source (.java) files are generated from the + * api project idl (.restspec.json) files using RestRequestBuilderGenerator. + * The generated rest client source files will be in the new target source set's + * java source directory, e.g. "src/mainGeneratedRest/java". + *

+ * + *

+ * RestRequestBuilderGenerator requires access to the data schemas referenced + * by the idl. The dataModel configuration specifies the resolver path needed + * by RestRequestBuilderGenerator to access the data schemas referenced by + * the idl that is not in the source set's pegasus directory. + * This plugin automatically includes the data schema (.pdsc) files in the + * source set's pegasus directory in the resolver path. + * In most cases, the dataModel configuration should contain data template jars. + * The data template jars contains both data schema (.pdsc) files and generated + * data template class (.class) files. By specifying data template jars instead + * of data model jars, redundant generation of data template classes is avoided + * as classes that can be found in the resolver path are not generated. + *

+ * + *

rest client compilation phase: in api project

+ * + *

+ * In this phase, the plugin compiles the generated rest client source (.java) + * files into class files. The restClientCompile configuration specifies the + * pegasus jars needed to compile these classes. The compile classpath is a + * composite of the dataModel configuration which includes the data template + * classes that were previously generated and included in the dependent data template + * jars, and the restClientCompile configuration. + * This configuration should specify a dependency on the Pegasus restli-client jar. + *

+ * + *

+ * The following stage is creating the rest model jar and the rest client jar. + * This plugin creates the rest model jar that includes the + * generated idl (.restspec.json) files, and sets the jar file's classification to + * "rest-model". Hence, the resulting jar file's name should end with "-rest-model.jar". + * It adds the rest model jar as an artifact to the restModel configuration. + * This jar file should only contain idl (.restspec.json) files. + *

+ * + *

+ * This plugin also creates the rest client jar that includes the generated + * idl (.restspec.json) files and the java class output directory of the + * target source set. It sets the jar file's classification to "rest-client". + * Hence, the resulting jar file's name should end with "-rest-client.jar". + * It adds the rest client jar file as an artifact to the restClient configuration. + * This jar file contains both idl (.restspec.json) files and generated rest client + * class (.class) files. + *

+ * + *

idl generation phase: in server implementation project

+ * + *

+ * Before entering this phase, the plugin will ensure that generating idl will + * occur after compiling the input source set. It will also ensure that IDEA + * and Eclipse tasks run after rest client source (.java) files are generated. + *

+ * + *

+ * In this phase, the plugin creates a new target source set for the generated files. + * The new target source set's name is the input source set's name suffixed with + * "GeneratedRest", e.g. "mainGeneratedRest". The plugin invokes + * RestLiResourceModelExporter to generate idl (.restspec.json) files for each + * IdlItem in the input source set's pegasus IdlOptions. The generated idl files + * will be in target source set's idl directory, e.g. "src/mainGeneratedRest/idl". + * For example, the following adds an IdlItem to the source set's pegasus IdlOptions. + * This line should appear in the impl project's build.gradle. If no IdlItem is added, + * this source set will be excluded from generating idl and checking idl compatibility, + * even if there are existing idl files. + *

+ *   pegasus.main.idlOptions.addIdlItem(["com.linkedin.restli.examples.groups.server"])
+ * 
+ *

+ * + *

+ * After the idl generation phase, each included idl file is checked for compatibility against + * those in the api project. In case the current interface breaks compatibility, + * by default the build fails and reports all compatibility errors and warnings. Otherwise, + * the build tasks in the api project later will package the resource classes into jar files. + * User can change the compatibility requirement between the current and published idl by + * setting the "rest.model.compatibility" project property, i.e. + * "gradle -Prest.model.compatibility= ..." The following levels are supported: + *

+ * The plugin needs to know where the api project is. It searches the api project in the + * following steps. If all searches fail, the build fails. + *
    + *
  1. + * Use the specified project from the impl project build.gradle file. The ext.apiProject + * property explicitly assigns the api project. E.g. + *
    + *       ext.apiProject = project(':groups:groups-server-api')
    + *     
    + * If multiple such statements exist, the last will be used. Wrong project path causes Gradle + * evaluation error. + *
  2. + *
  3. + * If no ext.apiProject property is defined, the plugin will try to guess the + * api project name with the following conventions. The search stops at the first successful match. + *
      + *
    1. + * If the impl project name ends with the following suffixes, substitute the suffix with "-api". + *
        + *
      1. -impl
      2. + *
      3. -service
      4. + *
      5. -server
      6. + *
      7. -server-impl
      8. + *
      + * This list can be overridden by inserting the following line to the project build.gradle: + *
      + *           ext.apiProjectSubstitutionSuffixes = ['-new-suffix-1', '-new-suffix-2']
      + *         
      + * Alternatively, this setting could be applied globally to all projects by putting it in + * the subprojects section of the root build.gradle. + *
    2. + *
    3. + * Append "-api" to the impl project name. + *
    4. + *
    + *
  4. + *
+ * The plugin invokes RestLiResourceModelCompatibilityChecker to check compatibility. + *

+ * + *

+ * The idl files in the api project are not generated by the plugin, but rather + * "published" from the impl project. The publishRestModel task is used to copy the + * idl files to the api project. This task is invoked automatically if the idls are + * verified to be "safe". "Safe" is determined by the "rest.model.compatibility" + * property. Because this task is skipped if the idls are functionally equivalent + * (not necessarily identical, e.g. differ in doc fields), if the default "equivalent" + * compatibility level is used, no file will be copied. If such automatic publishing + * is intended to be skipped, set the "rest.model.skipPublish" property to true. + * Note that all the properties are per-project and can be overridden in each project's + * build.gradle file. + *

+ * + *

+ * Please always keep in mind that if idl publishing has happened, a subsequent whole-project + * rebuild is necessary to pick up the changes. Otherwise, the Hudson job will fail and + * the source code commit will fail. + *

+ * + *

+ * The configurations that apply to generating the rest model and rest client jars + * are as follows: + *

+ *

+ * + *

+ * This plugin considers test source sets whose names begin with 'test' or 'integTest' to be + * test source sets. + *

+ */ +public class PegasusPlugin implements Plugin +{ + public static boolean debug = false; + + private static final GradleVersion MIN_REQUIRED_VERSION = GradleVersion.version("1.0"); // Next: 5.2.1 + private static final GradleVersion MIN_SUGGESTED_VERSION = GradleVersion.version("5.2.1"); // Next: 5.3 + + // + // Constants for generating sourceSet names and corresponding directory names + // for generated code + // + private static final String DATA_TEMPLATE_GEN_TYPE = "DataTemplate"; + private static final String REST_GEN_TYPE = "Rest"; + private static final String AVRO_SCHEMA_GEN_TYPE = "AvroSchema"; + + public static final String DATA_TEMPLATE_FILE_SUFFIX = ".pdsc"; + public static final String PDL_FILE_SUFFIX = ".pdl"; + // gradle property to opt OUT schema annotation validation, by default this feature is enabled. + private static final String DISABLE_SCHEMA_ANNOTATION_VALIDATION = "schema.annotation.validation.disable"; + // gradle property to opt in for destroying stale files from the build directory, + // by default it is disabled, because it triggers hot-reload (even if it results in a no-op) + private static final String DESTROY_STALE_FILES_ENABLE = "enableDestroyStaleFiles"; + public static final Collection DATA_TEMPLATE_FILE_SUFFIXES = new ArrayList<>(); + + public static final String IDL_FILE_SUFFIX = ".restspec.json"; + public static final String SNAPSHOT_FILE_SUFFIX = ".snapshot.json"; + public static final String SNAPSHOT_COMPAT_REQUIREMENT = "rest.model.compatibility"; + public static final String IDL_COMPAT_REQUIREMENT = "rest.idl.compatibility"; + // Pegasus schema compatibility level configuration, which is used to define the {@link CompatibilityLevel}. 
+ public static final String PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.pegasusSchema.compatibility"; + // Pegasus extension schema compatibility level configuration, which is used to define the {@link CompatibilityLevel} + public static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT = "pegasusPlugin.extensionSchema.compatibility"; + // CompatibilityOptions Mode configuration, which is used to define the {@link CompatibilityOptions#Mode} in the compatibility checker. + private static final String PEGASUS_COMPATIBILITY_MODE = "pegasusPlugin.pegasusSchemaCompatibilityCheckMode"; + + private static final Pattern TEST_DIR_REGEX = Pattern.compile("^(integ)?[Tt]est"); + private static final String SNAPSHOT_NO_PUBLISH = "rest.model.noPublish"; + private static final String SNAPSHOT_FORCE_PUBLISH = "rest.model.forcePublish"; + private static final String PROCESS_EMPTY_IDL_DIR = "rest.idl.processEmptyIdlDir"; + private static final String IDL_NO_PUBLISH = "rest.idl.noPublish"; + private static final String IDL_FORCE_PUBLISH = "rest.idl.forcePublish"; + private static final String SKIP_IDL_CHECK = "rest.idl.skipCheck"; + // gradle property to skip running GenerateRestModel task. + // Note it affects GenerateRestModel task only, and does not skip tasks depends on GenerateRestModel. 
+ private static final String SKIP_GENERATE_REST_MODEL= "rest.model.skipGenerateRestModel"; + private static final String SUPPRESS_REST_CLIENT_RESTLI_2 = "rest.client.restli2.suppress"; + private static final String SUPPRESS_REST_CLIENT_RESTLI_1 = "rest.client.restli1.suppress"; + + private static final String GENERATOR_CLASSLOADER_NAME = "pegasusGeneratorClassLoader"; + + private static final String CONVERT_TO_PDL_REVERSE = "convertToPdl.reverse"; + private static final String CONVERT_TO_PDL_KEEP_ORIGINAL = "convertToPdl.keepOriginal"; + private static final String CONVERT_TO_PDL_SKIP_VERIFICATION = "convertToPdl.skipVerification"; + private static final String CONVERT_TO_PDL_PRESERVE_SOURCE_CMD = "convertToPdl.preserveSourceCmd"; + + // Below variables are used to collect data across all pegasus projects (sub-projects) and then print information + // to the user at the end after build is finished. + private static StringBuffer _restModelCompatMessage = new StringBuffer(); + private static final Collection _needCheckinFiles = new ArrayList<>(); + private static final Collection _needBuildFolders = new ArrayList<>(); + private static final Collection _possibleMissingFilesInEarlierCommit = new ArrayList<>(); + + private static final String RUN_ONCE = "runOnce"; + private static final Object STATIC_PROJECT_EVALUATED_LOCK = new Object(); + + private static final List UNUSED_CONFIGURATIONS = Arrays.asList( + "dataTemplateGenerator", "restTools", "avroSchemaGenerator"); + // Directory in the dataTemplate jar that holds schemas translated from PDL to PDSC. 
+ private static final String TRANSLATED_SCHEMAS_DIR = "legacyPegasusSchemas"; + // Enable the use of argFiles for the tasks that support them + private static final String ENABLE_ARG_FILE = "pegasusPlugin.enableArgFile"; + // Enable the generation of fluent APIs + private static final String ENABLE_FLUENT_API = "pegasusPlugin.enableFluentApi"; + + // This config impacts GenerateDataTemplateTask and GenerateRestClientTask; + // If not set, by default all paths generated in these two tasks will be lower-case. + // This default behavior is needed because Linux, MacOS, Windows treat case sensitive paths differently, + // and we want to be consistent, so we choose lower-case as default case for path generated + private static final String CODE_GEN_PATH_CASE_SENSITIVE = "pegasusPlugin.generateCaseSensitivePath"; + + private static final String PEGASUS_PLUGIN_CONFIGURATION = "pegasusPlugin"; + + // Enable the use of generic pegasus schema compatibility checker + private static final String ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK = "pegasusPlugin.enablePegasusSchemaCompatibilityCheck"; + + private static final String PEGASUS_SCHEMA_SNAPSHOT = "PegasusSchemaSnapshot"; + + private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT = "PegasusExtensionSchemaSnapshot"; + + private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR = "pegasusSchemaSnapshot"; + + private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR = "pegasusExtensionSchemaSnapshot"; + + private static final String PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusSchemaSnapshotDir"; + + private static final String PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE = "overridePegasusExtensionSchemaSnapshotDir"; + + private static final String SRC = "src"; + + private static final String SCHEMA_ANNOTATION_HANDLER_CONFIGURATION = "schemaAnnotationHandler"; + + private static final String COMPATIBILITY_OPTIONS_MODE_EXTENSION = "EXTENSION"; + + + @SuppressWarnings("unchecked") + private Class> 
_thisPluginType = (Class>) + getClass().asSubclass(Plugin.class); + + private Task _generateSourcesJarTask; + private Javadoc _generateJavadocTask; + private Task _generateJavadocJarTask; + private boolean _configureIvyPublications = true; + + public void setPluginType(Class> pluginType) + { + _thisPluginType = pluginType; + } + + public void setSourcesJarTask(Task sourcesJarTask) + { + _generateSourcesJarTask = sourcesJarTask; + } + + public void setJavadocJarTask(Task javadocJarTask) + { + _generateJavadocJarTask = javadocJarTask; + } + + public void setConfigureIvyPublications(boolean configureIvyPublications) { + _configureIvyPublications = configureIvyPublications; + } + + @Override + public void apply(Project project) + { + checkGradleVersion(project); + + project.getPlugins().apply(JavaPlugin.class); + + // this HashMap will have a PegasusOptions per sourceSet + project.getExtensions().getExtraProperties().set("pegasus", new HashMap<>()); + // this map will extract PegasusOptions.GenerationMode to project property + project.getExtensions().getExtraProperties().set("PegasusGenerationMode", + Arrays.stream(PegasusOptions.GenerationMode.values()) + .collect(Collectors.toMap(PegasusOptions.GenerationMode::name, Function.identity()))); + + synchronized (STATIC_PROJECT_EVALUATED_LOCK) + { + // Check if this is the first time the block will run. Pegasus plugin can run multiple times in a build if + // multiple sub-projects applied the plugin. 
+ if (!project.getRootProject().hasProperty(RUN_ONCE) + || !Boolean.parseBoolean(String.valueOf(project.getRootProject().property(RUN_ONCE)))) + { + project.getGradle().projectsEvaluated(gradle -> + gradle.getRootProject().subprojects(subproject -> + UNUSED_CONFIGURATIONS.forEach(configurationName -> { + Configuration conf = subproject.getConfigurations().findByName(configurationName); + if (conf != null && !conf.getDependencies().isEmpty()) { + subproject.getLogger().warn("*** Project {} declares dependency to unused configuration \"{}\". " + + "This configuration is deprecated and you can safely remove the dependency. ***", + subproject.getPath(), configurationName); + } + }) + ) + ); + + // Re-initialize the static variables as they might have stale values from previous run. With Gradle 3.0 and + // gradle daemon enabled, the plugin class might not be loaded for every run. + DATA_TEMPLATE_FILE_SUFFIXES.clear(); + DATA_TEMPLATE_FILE_SUFFIXES.add(DATA_TEMPLATE_FILE_SUFFIX); + DATA_TEMPLATE_FILE_SUFFIXES.add(PDL_FILE_SUFFIX); + + _restModelCompatMessage = new StringBuffer(); + _needCheckinFiles.clear(); + _needBuildFolders.clear(); + _possibleMissingFilesInEarlierCommit.clear(); + + project.getGradle().buildFinished(result -> + { + StringBuilder endOfBuildMessage = new StringBuilder(); + if (_restModelCompatMessage.length() > 0) + { + endOfBuildMessage.append(_restModelCompatMessage); + } + + if (!_needCheckinFiles.isEmpty()) + { + endOfBuildMessage.append(createModifiedFilesMessage(_needCheckinFiles, _needBuildFolders)); + } + + if (!_possibleMissingFilesInEarlierCommit.isEmpty()) + { + endOfBuildMessage.append(createPossibleMissingFilesMessage(_possibleMissingFilesInEarlierCommit)); + } + + if (endOfBuildMessage.length() > 0) + { + result.getGradle().getRootProject().getLogger().quiet(endOfBuildMessage.toString()); + } + }); + + // Set an extra property on the root project to indicate the initialization is complete for the current build. 
+ project.getRootProject().getExtensions().getExtraProperties().set(RUN_ONCE, true); + } + } + + ConfigurationContainer configurations = project.getConfigurations(); + + // configuration for getting the required classes to make pegasus call main methods + configurations.maybeCreate(PEGASUS_PLUGIN_CONFIGURATION); + + // configuration for compiling generated data templates + Configuration dataTemplateCompile = configurations.maybeCreate("dataTemplateCompile"); + dataTemplateCompile.setVisible(false); + + // configuration for running rest client generator + Configuration restClientCompile = configurations.maybeCreate("restClientCompile"); + restClientCompile.setVisible(false); + + // configuration for running data template generator + // DEPRECATED! This configuration is no longer used. Please stop using it. + Configuration dataTemplateGenerator = configurations.maybeCreate("dataTemplateGenerator"); + dataTemplateGenerator.setVisible(false); + + // configuration for running rest client generator + // DEPRECATED! This configuration is no longer used. Please stop using it. + Configuration restTools = configurations.maybeCreate("restTools"); + restTools.setVisible(false); + + // configuration for running Avro schema generator + // DEPRECATED! 
To skip avro schema generation, use PegasusOptions.generationModes + Configuration avroSchemaGenerator = configurations.maybeCreate("avroSchemaGenerator"); + avroSchemaGenerator.setVisible(false); + + // configuration for depending on data schemas and potentially generated data templates + // and for publishing jars containing data schemas to the project artifacts for including in the ivy.xml + Configuration dataModel = configurations.maybeCreate("dataModel"); + Configuration testDataModel = configurations.maybeCreate("testDataModel"); + testDataModel.extendsFrom(dataModel); + + // configuration for depending on data schemas and potentially generated data templates + // and for publishing jars containing data schemas to the project artifacts for including in the ivy.xml + Configuration avroSchema = configurations.maybeCreate("avroSchema"); + Configuration testAvroSchema = configurations.maybeCreate("testAvroSchema"); + testAvroSchema.extendsFrom(avroSchema); + + // configuration for depending on rest idl and potentially generated client builders + // and for publishing jars containing rest idl to the project artifacts for including in the ivy.xml + Configuration restModel = configurations.maybeCreate("restModel"); + Configuration testRestModel = configurations.maybeCreate("testRestModel"); + testRestModel.extendsFrom(restModel); + + // configuration for publishing jars containing data schemas and generated data templates + // to the project artifacts for including in the ivy.xml + // + // published data template jars depends on the configurations used to compile the classes + // in the jar, this includes the data models/templates used by the data template generator + // and the classes used to compile the generated classes. 
+ Configuration dataTemplate = configurations.maybeCreate("dataTemplate"); + dataTemplate.extendsFrom(dataTemplateCompile, dataModel); + Configuration testDataTemplate = configurations.maybeCreate("testDataTemplate"); + testDataTemplate.extendsFrom(dataTemplate, testDataModel); + + // configuration for processing and validating schema annotation during build time. + // + // The configuration contains dependencies to schema annotation handlers which would process schema annotations + // and validate. + Configuration schemaAnnotationHandler = configurations.maybeCreate(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION); + + // configuration for publishing jars containing rest idl and generated client builders + // to the project artifacts for including in the ivy.xml + // + // published client builder jars depends on the configurations used to compile the classes + // in the jar, this includes the data models/templates (potentially generated by this + // project and) used by the data template generator and the classes used to compile + // the generated classes. 
+ Configuration restClient = configurations.maybeCreate("restClient"); + restClient.extendsFrom(restClientCompile, dataTemplate); + Configuration testRestClient = configurations.maybeCreate("testRestClient"); + testRestClient.extendsFrom(restClient, testDataTemplate); + + Properties properties = new Properties(); + InputStream inputStream = getClass().getResourceAsStream("/pegasus-version.properties"); + if (inputStream != null) + { + try + { + properties.load(inputStream); + } + catch (IOException e) + { + throw new GradleException("Unable to read pegasus-version.properties file.", e); + } + + String version = properties.getProperty("pegasus.version"); + + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data:" + version); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:data-avro-generator:" + version); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:generator:" + version); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "com.linkedin.pegasus:restli-tools:" + version); + } + else + { + project.getLogger().lifecycle("Unable to add pegasus dependencies to {}. 
Please be sure that " + + "'com.linkedin.pegasus:data', 'com.linkedin.pegasus:data-avro-generator', 'com.linkedin.pegasus:generator', 'com.linkedin.pegasus:restli-tools'" + + " are available on the configuration pegasusPlugin", + project.getPath()); + } + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, "org.slf4j:slf4j-simple:1.7.2"); + project.getDependencies().add(PEGASUS_PLUGIN_CONFIGURATION, project.files(System.getProperty("java.home") + "/../lib/tools.jar")); + + // this call has to be here because: + // 1) artifact cannot be published once projects has been evaluated, so we need to first + // create the tasks and artifact handler, then progressively append sources + // 2) in order to append sources progressively, the source and documentation tasks and artifacts must be + // configured/created before configuring and creating the code generation tasks. + + configureGeneratedSourcesAndJavadoc(project); + + ChangedFileReportTask changedFileReportTask = project.getTasks() + .create("changedFilesReport", ChangedFileReportTask.class); + + project.getTasks().getByName("check").dependsOn(changedFileReportTask); + + SourceSetContainer sourceSets = project.getConvention() + .getPlugin(JavaPluginConvention.class).getSourceSets(); + + sourceSets.all(sourceSet -> + { + if (sourceSet.getName().toLowerCase(Locale.US).contains("generated")) + { + return; + } + + checkAvroSchemaExist(project, sourceSet); + + // the idl Generator input options will be inside the PegasusOptions class. 
Users of the + // plugin can set the inputOptions in their build.gradle + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + pegasusOptions.put(sourceSet.getName(), new PegasusOptions()); + + // rest model generation could fail on incompatibility + // if it can fail, fail it early + configureRestModelGeneration(project, sourceSet); + + // Do compatibility check for schemas under "pegasus" directory if the configuration property is provided. + if (isPropertyTrue(project, ENABLE_PEGASUS_SCHEMA_COMPATIBILITY_CHECK)) + { + configurePegasusSchemaSnapshotGeneration(project, sourceSet, false); + } + + configurePegasusSchemaSnapshotGeneration(project, sourceSet, true); + + configureConversionUtilities(project, sourceSet); + + GenerateDataTemplateTask generateDataTemplateTask = configureDataTemplateGeneration(project, sourceSet); + + configureAvroSchemaGeneration(project, sourceSet); + + configureRestClientGeneration(project, sourceSet); + + if (!isPropertyTrue(project, DISABLE_SCHEMA_ANNOTATION_VALIDATION)) + { + configureSchemaAnnotationValidation(project, sourceSet, generateDataTemplateTask); + } + + Task cleanGeneratedDirTask = project.task(sourceSet.getTaskName("clean", "GeneratedDir")); + cleanGeneratedDirTask.doLast(new CacheableAction<>(task -> + { + deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE); + deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE); + deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE); + })); + + // make clean depends on deleting the generated directories + project.getTasks().getByName("clean").dependsOn(cleanGeneratedDirTask); + + // Set data schema directories as resource roots + configureDataSchemaResourcesRoot(project, sourceSet); + }); + + project.getExtensions().getExtraProperties().set(GENERATOR_CLASSLOADER_NAME, getClass().getClassLoader()); + } + + protected void configureSchemaAnnotationValidation(Project project, + SourceSet 
sourceSet, + GenerateDataTemplateTask generateDataTemplatesTask) + { + // Task would execute based on the following order. + // generateDataTemplatesTask -> validateSchemaAnnotationTask + + // Create ValidateSchemaAnnotation task + ValidateSchemaAnnotationTask validateSchemaAnnotationTask = project.getTasks() + .create(sourceSet.getTaskName("validate", "schemaAnnotation"), ValidateSchemaAnnotationTask.class, task -> + { + task.setInputDir(generateDataTemplatesTask.getInputDir()); + task.setResolverPath(getDataModelConfig(project, sourceSet)); // same resolver path as generateDataTemplatesTask + task.setClassPath(project.getConfigurations() .getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION) + .plus(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)) + .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME))); + task.setHandlerJarPath(project.getConfigurations() .getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + } + ); + + // validateSchemaAnnotationTask depend on generateDataTemplatesTask + validateSchemaAnnotationTask.dependsOn(generateDataTemplatesTask); + + // Check depends on validateSchemaAnnotationTask. 
+ project.getTasks().getByName("check").dependsOn(validateSchemaAnnotationTask); + } + + + + @SuppressWarnings("deprecation") + protected void configureGeneratedSourcesAndJavadoc(Project project) + { + _generateJavadocTask = project.getTasks().create("generateJavadoc", Javadoc.class); + + if (_generateSourcesJarTask == null) + { + // + // configuration for publishing jars containing sources for generated classes + // to the project artifacts for including in the ivy.xml + // + ConfigurationContainer configurations = project.getConfigurations(); + Configuration generatedSources = configurations.maybeCreate("generatedSources"); + Configuration testGeneratedSources = configurations.maybeCreate("testGeneratedSources"); + testGeneratedSources.extendsFrom(generatedSources); + + _generateSourcesJarTask = project.getTasks().create("generateSourcesJar", Jar.class, jarTask -> { + jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + jarTask.setDescription("Generates a jar file containing the sources for the generated Java classes."); + // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + jarTask.getArchiveClassifier().set("sources"); + }); + + project.getArtifacts().add("generatedSources", _generateSourcesJarTask); + } + + if (_generateJavadocJarTask == null) + { + // + // configuration for publishing jars containing Javadoc for generated classes + // to the project artifacts for including in the ivy.xml + // + ConfigurationContainer configurations = project.getConfigurations(); + Configuration generatedJavadoc = configurations.maybeCreate("generatedJavadoc"); + Configuration testGeneratedJavadoc = configurations.maybeCreate("testGeneratedJavadoc"); + testGeneratedJavadoc.extendsFrom(generatedJavadoc); + + _generateJavadocJarTask = project.getTasks().create("generateJavadocJar", Jar.class, jarTask -> { + jarTask.dependsOn(_generateJavadocTask); + 
jarTask.setGroup(JavaBasePlugin.DOCUMENTATION_GROUP); + jarTask.setDescription("Generates a jar file containing the Javadoc for the generated Java classes."); + // FIXME change to #getArchiveClassifier().set("sources"); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + jarTask.getArchiveClassifier().set("javadoc"); + jarTask.from(_generateJavadocTask.getDestinationDir()); + }); + + project.getArtifacts().add("generatedJavadoc", _generateJavadocJarTask); + } + else + { + // TODO: Tighten the types so that _generateJavadocJarTask must be of type Jar. + ((Jar) _generateJavadocJarTask).from(_generateJavadocTask.getDestinationDir()); + _generateJavadocJarTask.dependsOn(_generateJavadocTask); + } + } + + private static void deleteGeneratedDir(Project project, SourceSet sourceSet, String dirType) + { + String generatedDirPath = getGeneratedDirPath(project, sourceSet, dirType); + project.getLogger().info("Delete generated directory {}", generatedDirPath); + project.delete(generatedDirPath); + } + + private static > Class getCompatibilityLevelClass(Project project) + { + ClassLoader generatorClassLoader = (ClassLoader) project.property(GENERATOR_CLASSLOADER_NAME); + + String className = "com.linkedin.restli.tools.idlcheck.CompatibilityLevel"; + try + { + @SuppressWarnings("unchecked") + Class enumClass = (Class) generatorClassLoader.loadClass(className).asSubclass(Enum.class); + return enumClass; + } + catch (ClassNotFoundException e) + { + throw new RuntimeException("Could not load class " + className); + } + } + + private static void addGeneratedDir(Project project, SourceSet sourceSet, Collection configurations) + { + project.getPlugins().withType(IdeaPlugin.class, ideaPlugin -> { + IdeaModule ideaModule = ideaPlugin.getModel().getModule(); + // stupid if block needed because of stupid assignment required to update source dirs + if (isTestSourceSet(sourceSet)) + { + Set sourceDirs = ideaModule.getTestSourceDirs(); + 
sourceDirs.addAll(sourceSet.getJava().getSrcDirs()); + // this is stupid but assignment is required + ideaModule.setTestSourceDirs(sourceDirs); + if (debug) + { + System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule testSourceDirs " + + ideaModule.getTestSourceDirs()); + } + } + else + { + Set sourceDirs = ideaModule.getSourceDirs(); + sourceDirs.addAll(sourceSet.getJava().getSrcDirs()); + // this is stupid but assignment is required + ideaModule.setSourceDirs(sourceDirs); + if (debug) + { + System.out.println("Added " + sourceSet.getJava().getSrcDirs() + " to IdeaModule sourceDirs " + + ideaModule.getSourceDirs()); + } + } + Collection compilePlus = ideaModule.getScopes().get("COMPILE").get("plus"); + compilePlus.addAll(configurations); + ideaModule.getScopes().get("COMPILE").put("plus", compilePlus); + }); + } + + private static void checkAvroSchemaExist(Project project, SourceSet sourceSet) + { + String sourceDir = "src" + File.separatorChar + sourceSet.getName(); + File avroSourceDir = project.file(sourceDir + File.separatorChar + "avro"); + if (avroSourceDir.exists()) + { + project.getLogger().lifecycle("{}'s {} has non-empty avro directory. pegasus plugin does not process avro directory", + project.getName(), sourceDir); + } + } + + // Compute the name of the source set that will contain a type of an input generated code. + // e.g. genType may be 'DataTemplate' or 'Rest' + private static String getGeneratedSourceSetName(SourceSet sourceSet, String genType) + { + return sourceSet.getName() + "Generated" + genType; + } + + // Compute the directory name that will contain a type generated code of an input source set. + // e.g. 
genType may be 'DataTemplate' or 'Rest' + public static String getGeneratedDirPath(Project project, SourceSet sourceSet, String genType) + { + String override = getOverridePath(project, sourceSet, "overrideGeneratedDir"); + String sourceSetName = getGeneratedSourceSetName(sourceSet, genType); + String base = override == null ? "src" : override; + + return base + File.separatorChar + sourceSetName; + } + + public static String getDataSchemaPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overridePegasusDir"); + if (override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "pegasus"; + } + else + { + return override; + } + } + + private static String getExtensionSchemaPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overrideExtensionSchemaDir"); + if(override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "extensions"; + } + else + { + return override; + } + } + + private static String getSnapshotPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overrideSnapshotDir"); + if (override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "snapshot"; + } + else + { + return override; + } + } + + private static String getIdlPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, "overrideIdlDir"); + if (override == null) + { + return "src" + File.separatorChar + sourceSet.getName() + File.separatorChar + "idl"; + } + else + { + return override; + } + } + + private static String getPegasusSchemaSnapshotPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, PEGASUS_SCHEMA_SNAPSHOT_DIR_OVERRIDE); + if (override == null) + { + return SRC + File.separatorChar + sourceSet.getName() + 
File.separatorChar + PEGASUS_SCHEMA_SNAPSHOT_DIR; + } + else + { + return override; + } + } + + private static String getPegasusExtensionSchemaSnapshotPath(Project project, SourceSet sourceSet) + { + String override = getOverridePath(project, sourceSet, PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR_OVERRIDE); + if (override == null) + { + return SRC + File.separatorChar + sourceSet.getName() + File.separatorChar + PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_DIR; + } + else + { + return override; + } + } + + private static String getOverridePath(Project project, SourceSet sourceSet, String overridePropertyName) + { + String sourceSetPropertyName = sourceSet.getName() + '.' + overridePropertyName; + String override = getNonEmptyProperty(project, sourceSetPropertyName); + + if (override == null && sourceSet.getName().equals("main")) + { + override = getNonEmptyProperty(project, overridePropertyName); + } + + return override; + } + + private static boolean isTestSourceSet(SourceSet sourceSet) + { + return TEST_DIR_REGEX.matcher(sourceSet.getName()).find(); + } + + private static Configuration getDataModelConfig(Project project, SourceSet sourceSet) + { + return isTestSourceSet(sourceSet) + ? project.getConfigurations().getByName("testDataModel") + : project.getConfigurations().getByName("dataModel"); + } + + private static boolean isTaskSuccessful(Task task) + { + return task.getState().getExecuted() + // Task is not successful if it is not upto date and is skipped. 
+ && !(task.getState().getSkipped() && !task.getState().getUpToDate()) + && task.getState().getFailure() == null; + } + + private static boolean isResultEquivalent(File compatibilityLogFile) + { + return isResultEquivalent(compatibilityLogFile, false); + } + + private static boolean isResultEquivalent(File compatibilityLogFile, boolean restSpecOnly) + { + CompatibilityLogChecker logChecker = new CompatibilityLogChecker(); + try + { + logChecker.write(Files.readAllBytes(compatibilityLogFile.toPath())); + } + catch (IOException e) + { + throw new GradleException("Error while processing compatibility report: " + e.getMessage()); + } + return logChecker.getRestSpecCompatibility().isEmpty() && + (restSpecOnly || logChecker.getModelCompatibility().isEmpty()); + } + + protected void configureRestModelGeneration(Project project, SourceSet sourceSet) + { + if (sourceSet.getAllSource().isEmpty()) + { + project.getLogger().info("No source files found for sourceSet {}. Skipping idl generation.", sourceSet.getName()); + return; + } + + // afterEvaluate needed so that api project can be overridden via ext.apiProject + project.afterEvaluate(p -> + { + // find api project here instead of in each project's plugin configuration + // this allows api project relation options (ext.api*) to be specified anywhere in the build.gradle file + // alternatively, pass closures to task configuration, and evaluate the closures when task is executed + Project apiProject = getCheckedApiProject(project); + + // make sure the api project is evaluated. Important for configure-on-demand mode. 
+ if (apiProject != null) + { + project.evaluationDependsOn(apiProject.getPath()); + + if (!apiProject.getPlugins().hasPlugin(_thisPluginType)) + { + apiProject = null; + } + } + + if (apiProject == null) + { + return; + } + + Task untypedJarTask = project.getTasks().findByName(sourceSet.getJarTaskName()); + if (!(untypedJarTask instanceof Jar)) + { + return; + } + Jar jarTask = (Jar) untypedJarTask; + + String snapshotCompatPropertyName = findProperty(FileCompatibilityType.SNAPSHOT); + if (project.hasProperty(snapshotCompatPropertyName) && "off".equalsIgnoreCase((String) project.property(snapshotCompatPropertyName))) + { + project.getLogger().lifecycle("Project {} snapshot compatibility level \"OFF\" is deprecated. Default to \"IGNORE\".", + project.getPath()); + } + + // generate the rest model + FileCollection restModelCodegenClasspath = project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION) + .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME)) + .plus(sourceSet.getRuntimeClasspath()); + String destinationDirPrefix = getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + File.separatorChar; + FileCollection restModelResolverPath = apiProject.files(getDataSchemaPath(project, sourceSet)) + .plus(getDataModelConfig(apiProject, sourceSet)); + Set watchedRestModelInputDirs = buildWatchedRestModelInputDirs(project, sourceSet); + Set restModelInputDirs = difference(sourceSet.getAllSource().getSrcDirs(), + sourceSet.getResources().getSrcDirs()); + + Task generateRestModelTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "restModel"), GenerateRestModelTask.class, task -> + { + task.dependsOn(project.getTasks().getByName(sourceSet.getClassesTaskName())); + task.setCodegenClasspath(restModelCodegenClasspath); + task.setWatchedCodegenClasspath(restModelCodegenClasspath + .filter(file -> !"main".equals(file.getName()) && !"classes".equals(file.getName()))); + task.setInputDirs(restModelInputDirs); + 
task.setWatchedInputDirs(watchedRestModelInputDirs.isEmpty() + ? restModelInputDirs : watchedRestModelInputDirs); + // we need all the artifacts from runtime for any private implementation classes the server code might need. + task.setSnapshotDestinationDir(project.file(destinationDirPrefix + "snapshot")); + task.setIdlDestinationDir(project.file(destinationDirPrefix + "idl")); + + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + task.setIdlOptions(pegasusOptions.get(sourceSet.getName()).idlOptions); + + task.setResolverPath(restModelResolverPath); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> !isPropertyTrue(project, SKIP_GENERATE_REST_MODEL)); + + task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, REST_GEN_TYPE))); + }); + + File apiSnapshotDir = apiProject.file(getSnapshotPath(apiProject, sourceSet)); + File apiIdlDir = apiProject.file(getIdlPath(apiProject, sourceSet)); + apiSnapshotDir.mkdirs(); + + if (!isPropertyTrue(project, SKIP_IDL_CHECK)) + { + apiIdlDir.mkdirs(); + } + + CheckRestModelTask checkRestModelTask = project.getTasks() + .create(sourceSet.getTaskName("check", "RestModel"), CheckRestModelTask.class, task -> + { + task.dependsOn(generateRestModelTask); + task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.setPreviousSnapshotDirectory(apiSnapshotDir); + task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.setPreviousIdlDirectory(apiIdlDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setModelCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT)); + task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK)); + + task.doLast(new CacheableAction<>(t -> + { + if (!task.isEquivalent()) + { + 
_restModelCompatMessage.append(task.getWholeMessage()); + } + })); + }); + + CheckSnapshotTask checkSnapshotTask = project.getTasks() + .create(sourceSet.getTaskName("check", "Snapshot"), CheckSnapshotTask.class, task -> { + task.dependsOn(generateRestModelTask); + task.setCurrentSnapshotFiles(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.setPreviousSnapshotDirectory(apiSnapshotDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSnapshotCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.SNAPSHOT)); + + task.onlyIf(t -> isPropertyTrue(project, SKIP_IDL_CHECK)); + }); + + CheckIdlTask checkIdlTask = project.getTasks() + .create(sourceSet.getTaskName("check", "Idl"), CheckIdlTask.class, task -> + { + task.dependsOn(generateRestModelTask); + task.setCurrentIdlFiles(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.setPreviousIdlDirectory(apiIdlDir); + task.setResolverPath(restModelResolverPath); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setIdlCompatLevel(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + + task.onlyIf(t -> !isPropertyTrue(project, SKIP_IDL_CHECK) + && !"OFF".equals(PropertyUtil.findCompatLevel(project, FileCompatibilityType.IDL))); + }); + + // rest model publishing involves cross-project reference + // configure after all projects have been evaluated + // the file copy can be turned off by "rest.model.noPublish" flag + Task publishRestliSnapshotTask = project.getTasks() + .create(sourceSet.getTaskName("publish", "RestliSnapshot"), PublishRestModelTask.class, task -> + { + task.dependsOn(checkRestModelTask, checkSnapshotTask, checkIdlTask); + task.from(SharedFileUtils.getSnapshotFiles(project, destinationDirPrefix)); + task.into(apiSnapshotDir); + 
task.setSuffix(SNAPSHOT_FILE_SUFFIX); + + task.onlyIf(t -> + isPropertyTrue(project, SNAPSHOT_FORCE_PUBLISH) || + ( + !isPropertyTrue(project, SNAPSHOT_NO_PUBLISH) && + ( + ( + isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkSnapshotTask) && + checkSnapshotTask.getSummaryTarget().exists() && + !isResultEquivalent(checkSnapshotTask.getSummaryTarget()) + ) || + ( + !isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkRestModelTask) && + checkRestModelTask.getSummaryTarget().exists() && + !isResultEquivalent(checkRestModelTask.getSummaryTarget()) + ) + )) + ); + }); + + Task publishRestliIdlTask = project.getTasks() + .create(sourceSet.getTaskName("publish", "RestliIdl"), PublishRestModelTask.class, task -> { + task.dependsOn(checkRestModelTask, checkIdlTask, checkSnapshotTask); + task.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + task.into(apiIdlDir); + task.setSuffix(IDL_FILE_SUFFIX); + + task.onlyIf(t -> + isPropertyTrue(project, IDL_FORCE_PUBLISH) || + ( + !isPropertyTrue(project, IDL_NO_PUBLISH) && + ( + ( + isPropertyTrue(project, SKIP_IDL_CHECK) && + isTaskSuccessful(checkSnapshotTask) && + checkSnapshotTask.getSummaryTarget().exists() && + !isResultEquivalent(checkSnapshotTask.getSummaryTarget(), true) + ) || + ( + !isPropertyTrue(project, SKIP_IDL_CHECK) && + ( + (isTaskSuccessful(checkRestModelTask) && + checkRestModelTask.getSummaryTarget().exists() && + !isResultEquivalent(checkRestModelTask.getSummaryTarget(), true)) || + (isTaskSuccessful(checkIdlTask) && + checkIdlTask.getSummaryTarget().exists() && + !isResultEquivalent(checkIdlTask.getSummaryTarget())) + ) + ) + )) + ); + }); + + project.getLogger().info("API project selected for {} is {}", + publishRestliIdlTask.getPath(), apiProject.getPath()); + + jarTask.from(SharedFileUtils.getIdlFiles(project, destinationDirPrefix)); + // add generated .restspec.json files as resources to the jar + jarTask.dependsOn(publishRestliSnapshotTask, 
publishRestliIdlTask); + + ChangedFileReportTask changedFileReportTask = (ChangedFileReportTask) project.getTasks() + .getByName("changedFilesReport"); + + // Use the files from apiDir for generating the changed files report as we need to notify user only when + // source system files are modified. + changedFileReportTask.setIdlFiles(SharedFileUtils.getSuffixedFiles(project, apiIdlDir, IDL_FILE_SUFFIX)); + changedFileReportTask.setSnapshotFiles(SharedFileUtils.getSuffixedFiles(project, apiSnapshotDir, + SNAPSHOT_FILE_SUFFIX)); + changedFileReportTask.mustRunAfter(publishRestliSnapshotTask, publishRestliIdlTask); + changedFileReportTask.doLast(new CacheableAction<>(t -> + { + if (!changedFileReportTask.getNeedCheckinFiles().isEmpty()) + { + project.getLogger().info("Adding modified files to need checkin list..."); + _needCheckinFiles.addAll(changedFileReportTask.getNeedCheckinFiles()); + _needBuildFolders.add(getCheckedApiProject(project).getPath()); + } + })); + }); + } + + protected void configurePegasusSchemaSnapshotGeneration(Project project, SourceSet sourceSet, boolean isExtensionSchema) + { + File schemaDir = isExtensionSchema? project.file(getExtensionSchemaPath(project, sourceSet)) + : project.file(getDataSchemaPath(project, sourceSet)); + + if ((isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, PDL_FILE_SUFFIX).isEmpty()) || + (!isExtensionSchema && SharedFileUtils.getSuffixedFiles(project, schemaDir, DATA_TEMPLATE_FILE_SUFFIXES).isEmpty())) + { + return; + } + + Path publishablePegasusSchemaSnapshotDir = project.getBuildDir().toPath().resolve(sourceSet.getName() + + (isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT)); + + Task generatePegasusSchemaSnapshot = generatePegasusSchemaSnapshot(project, sourceSet, + isExtensionSchema ? 
PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT, schemaDir, + publishablePegasusSchemaSnapshotDir.toFile(), isExtensionSchema); + + File pegasusSchemaSnapshotDir = project.file(isExtensionSchema ? getPegasusExtensionSchemaSnapshotPath(project, sourceSet) + : getPegasusSchemaSnapshotPath(project, sourceSet)); + pegasusSchemaSnapshotDir.mkdirs(); + + Task checkSchemaSnapshot = project.getTasks().create(sourceSet.getTaskName("check", + isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT), + CheckPegasusSnapshotTask.class, task -> + { + task.dependsOn(generatePegasusSchemaSnapshot); + task.setCurrentSnapshotDirectory(publishablePegasusSchemaSnapshotDir.toFile()); + task.setPreviousSnapshotDirectory(pegasusSchemaSnapshotDir); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION) + .plus(project.getConfigurations().getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)) + .plus(project.getConfigurations().getByName(JavaPlugin.RUNTIME_CLASSPATH_CONFIGURATION_NAME))); + task.setCompatibilityLevel(isExtensionSchema ? + PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT) + :PropertyUtil.findCompatLevel(project, FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT)); + task.setCompatibilityMode(isExtensionSchema ? COMPATIBILITY_OPTIONS_MODE_EXTENSION : + PropertyUtil.findCompatMode(project, PEGASUS_COMPATIBILITY_MODE)); + task.setExtensionSchema(isExtensionSchema); + task.setHandlerJarPath(project.getConfigurations() .getByName(SCHEMA_ANNOTATION_HANDLER_CONFIGURATION)); + + task.onlyIf(t -> + { + String pegasusSnapshotCompatPropertyName = isExtensionSchema ? 
+ findProperty(FileCompatibilityType.PEGASUS_EXTENSION_SCHEMA_SNAPSHOT) + : findProperty(FileCompatibilityType.PEGASUS_SCHEMA_SNAPSHOT); + return !project.hasProperty(pegasusSnapshotCompatPropertyName) || + !"off".equalsIgnoreCase((String) project.property(pegasusSnapshotCompatPropertyName)); + }); + }); + + Task publishPegasusSchemaSnapshot = publishPegasusSchemaSnapshot(project, sourceSet, + isExtensionSchema ? PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: PEGASUS_SCHEMA_SNAPSHOT, checkSchemaSnapshot, + publishablePegasusSchemaSnapshotDir.toFile(), pegasusSchemaSnapshotDir); + + project.getTasks().getByName(LifecycleBasePlugin.ASSEMBLE_TASK_NAME).dependsOn(publishPegasusSchemaSnapshot); + } + + @SuppressWarnings("deprecation") + protected void configureAvroSchemaGeneration(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + File avroDir = project.file(getGeneratedDirPath(project, sourceSet, AVRO_SCHEMA_GEN_TYPE) + + File.separatorChar + "avro"); + + // generate avro schema files from data schema + Task generateAvroSchemaTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "avroSchema"), GenerateAvroSchemaTask.class, task -> { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(avroDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> + { + if (task.getInputDir().exists()) + { + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + if (pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.AVRO)) + { + return true; + } + } + + return !project.getConfigurations().getByName("avroSchemaGenerator").isEmpty(); + }); + + task.doFirst(new CacheableAction<>(t -> 
deleteGeneratedDir(project, sourceSet, AVRO_SCHEMA_GEN_TYPE))); + }); + + project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(generateAvroSchemaTask); + + // create avro schema jar file + + Task avroSchemaJarTask = project.getTasks().create(sourceSet.getName() + "AvroSchemaJar", Jar.class, task -> + { + // add path prefix to each file in the data schema directory + task.from(avroDir, copySpec -> + copySpec.eachFile(fileCopyDetails -> + fileCopyDetails.setPath("avro" + File.separatorChar + fileCopyDetails.getPath()))); + + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "avro-schema")); + task.setDescription("Generate an avro schema jar"); + }); + + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("avroSchema", avroSchemaJarTask); + } + else + { + project.getArtifacts().add("testAvroSchema", avroSchemaJarTask); + } + } + + protected void configureConversionUtilities(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + boolean reverse = isPropertyTrue(project, CONVERT_TO_PDL_REVERSE); + boolean keepOriginal = isPropertyTrue(project, CONVERT_TO_PDL_KEEP_ORIGINAL); + boolean skipVerification = isPropertyTrue(project, CONVERT_TO_PDL_SKIP_VERIFICATION); + String preserveSourceCmd = getNonEmptyProperty(project, CONVERT_TO_PDL_PRESERVE_SOURCE_CMD); + + // Utility task for migrating between PDSC and PDL. 
+ project.getTasks().create(sourceSet.getTaskName("convert", "ToPdl"), TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(dataSchemaDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setPreserveSourceCmd(preserveSourceCmd); + if (reverse) + { + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDSC); + } + else + { + task.setSourceFormat(SchemaFileType.PDSC); + task.setDestinationFormat(SchemaFileType.PDL); + } + task.setKeepOriginal(keepOriginal); + task.setSkipVerification(skipVerification); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> task.getInputDir().exists()); + task.doLast(new CacheableAction<>(t -> + { + project.getLogger().lifecycle("Pegasus schema conversion complete."); + project.getLogger().lifecycle("All pegasus schema files in " + dataSchemaDir + " have been converted"); + project.getLogger().lifecycle("You can use '-PconvertToPdl.reverse=true|false' to change the direction of conversion."); + })); + }); + + // Helper task for reformatting existing PDL schemas by generating them again. 
+ project.getTasks().create(sourceSet.getTaskName("reformat", "Pdl"), TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(dataSchemaDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDL); + task.setKeepOriginal(true); + task.setSkipVerification(true); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + + task.onlyIf(t -> task.getInputDir().exists()); + task.doLast(new CacheableAction<>(t -> project.getLogger().lifecycle("PDL reformat complete."))); + }); + } + + @SuppressWarnings("deprecation") + protected GenerateDataTemplateTask configureDataTemplateGeneration(Project project, SourceSet sourceSet) + { + File dataSchemaDir = project.file(getDataSchemaPath(project, sourceSet)); + File generatedDataTemplateDir = project.file(getGeneratedDirPath(project, sourceSet, DATA_TEMPLATE_GEN_TYPE) + + File.separatorChar + "java"); + File publishableSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "Schemas"); + File publishableLegacySchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "LegacySchemas"); + File publishableExtensionSchemasBuildDir = project.file(project.getBuildDir().getAbsolutePath() + + File.separatorChar + sourceSet.getName() + "ExtensionSchemas"); + + // generate data template source files from data schema + GenerateDataTemplateTask generateDataTemplatesTask = project.getTasks() + .create(sourceSet.getTaskName("generate", "dataTemplate"), GenerateDataTemplateTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(generatedDataTemplateDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + 
task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE)) + { + task.setGenerateLowercasePath(false); + } + + task.onlyIf(t -> + { + if (task.getInputDir().exists()) + { + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + return pegasusOptions.get(sourceSet.getName()).hasGenerationMode(PegasusOptions.GenerationMode.PEGASUS); + } + + return false; + }); + + task.doFirst(new CacheableAction<>(t -> deleteGeneratedDir(project, sourceSet, DATA_TEMPLATE_GEN_TYPE))); + }); + + // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar. + ((Jar) _generateSourcesJarTask).from(generateDataTemplatesTask.getDestinationDir()); + _generateSourcesJarTask.dependsOn(generateDataTemplatesTask); + + _generateJavadocTask.source(generateDataTemplatesTask.getDestinationDir()); + _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() + .plus(project.getConfigurations().getByName("dataTemplateCompile")) + .plus(generateDataTemplatesTask.getResolverPath())); + _generateJavadocTask.dependsOn(generateDataTemplatesTask); + + // Add extra dependencies for data model compilation + project.getDependencies().add("dataTemplateCompile", "com.google.code.findbugs:jsr305:3.0.2"); + + // create new source set for generated java source and class files + String targetSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE); + + SourceSetContainer sourceSets = project.getConvention() + .getPlugin(JavaPluginConvention.class).getSourceSets(); + + SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss -> + { + ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedDataTemplateDir)); + ss.setCompileClasspath(getDataModelConfig(project, sourceSet) + 
.plus(project.getConfigurations().getByName("dataTemplateCompile"))); + }); + + // idea plugin needs to know about new generated java source directory and its dependencies + addGeneratedDir(project, targetSourceSet, Arrays.asList( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("dataTemplateCompile"))); + + // Set source compatibility to 1.8 as the data-templates now generate code with Java 8 features. + JavaCompile compileTask = project.getTasks() + .withType(JavaCompile.class).getByName(targetSourceSet.getCompileJavaTaskName()); + compileTask.doFirst(new CacheableAction<>(task -> { + ((JavaCompile) task).setSourceCompatibility("1.8"); + ((JavaCompile) task).setTargetCompatibility("1.8"); + })); + // make sure that java source files have been generated before compiling them + compileTask.dependsOn(generateDataTemplatesTask); + + // Dummy task to maintain backward compatibility + // TODO: Delete this task once use cases have had time to reference the new task + Task destroyStaleFiles = project.getTasks().create(sourceSet.getName() + "DestroyStaleFiles", Delete.class); + destroyStaleFiles.onlyIf(task -> { + project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath()); + return false; + }); + + // Dummy task to maintain backward compatibility, as this task was replaced by CopySchemas + // TODO: Delete this task once use cases have had time to reference the new task + Task copyPdscSchemasTask = project.getTasks().create(sourceSet.getName() + "CopyPdscSchemas", Copy.class); + copyPdscSchemasTask.dependsOn(destroyStaleFiles); + copyPdscSchemasTask.onlyIf(task -> { + project.getLogger().lifecycle("{} task is a NO-OP task.", task.getPath()); + return false; + }); + + // Prepare schema files for publication by syncing schema folders. 
+ Task prepareSchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "CopySchemas", Sync.class, task -> + { + task.from(dataSchemaDir, syncSpec -> DATA_TEMPLATE_FILE_SUFFIXES.forEach(suffix -> syncSpec.include("**/*" + suffix))); + task.into(publishableSchemasBuildDir); + }); + prepareSchemasForPublishTask.dependsOn(copyPdscSchemasTask); + + Collection dataTemplateJarDepends = new ArrayList<>(); + dataTemplateJarDepends.add(compileTask); + dataTemplateJarDepends.add(prepareSchemasForPublishTask); + + // Convert all PDL files back to PDSC for publication + // TODO: Remove this conversion permanently once translated PDSCs are no longer needed. + Task prepareLegacySchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "TranslateSchemas", TranslateSchemasTask.class, task -> + { + task.setInputDir(dataSchemaDir); + task.setDestinationDir(publishableLegacySchemasBuildDir); + task.setResolverPath(getDataModelConfig(project, sourceSet)); + task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setSourceFormat(SchemaFileType.PDL); + task.setDestinationFormat(SchemaFileType.PDSC); + task.setKeepOriginal(true); + task.setSkipVerification(true); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + }); + + prepareLegacySchemasForPublishTask.dependsOn(destroyStaleFiles); + dataTemplateJarDepends.add(prepareLegacySchemasForPublishTask); + + // extension schema directory + File extensionSchemaDir = project.file(getExtensionSchemaPath(project, sourceSet)); + + if (!SharedFileUtils.getSuffixedFiles(project, extensionSchemaDir, PDL_FILE_SUFFIX).isEmpty()) + { + // Validate extension schemas if extension schemas are provided. 
+ ValidateExtensionSchemaTask validateExtensionSchemaTask = project.getTasks() + .create(sourceSet.getTaskName("validate", "ExtensionSchemas"), ValidateExtensionSchemaTask.class, task -> + { + task.setInputDir(extensionSchemaDir); + task.setResolverPath( + getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet)))); + task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + }); + + Task prepareExtensionSchemasForPublishTask = project.getTasks() + .create(sourceSet.getName() + "CopyExtensionSchemas", Sync.class, task -> + { + task.from(extensionSchemaDir, syncSpec -> syncSpec.include("**/*" + PDL_FILE_SUFFIX)); + task.into(publishableExtensionSchemasBuildDir); + }); + + prepareExtensionSchemasForPublishTask.dependsOn(validateExtensionSchemaTask); + prepareExtensionSchemasForPublishTask.dependsOn(copyPdscSchemasTask); + dataTemplateJarDepends.add(prepareExtensionSchemasForPublishTask); + } + + // include pegasus files in the output of this SourceSet + project.getTasks().withType(ProcessResources.class).getByName(targetSourceSet.getProcessResourcesTaskName(), it -> + { + it.from(prepareSchemasForPublishTask, copy -> copy.into("pegasus")); + // TODO: Remove this permanently once translated PDSCs are no longer needed. 
+ it.from(prepareLegacySchemasForPublishTask, copy -> copy.into(TRANSLATED_SCHEMAS_DIR)); + Sync copyExtensionSchemasTask = project.getTasks().withType(Sync.class).findByName(sourceSet.getName() + "CopyExtensionSchemas"); + if (copyExtensionSchemasTask != null) + { + it.from(copyExtensionSchemasTask, copy -> copy.into("extensions")); + } + }); + + // create data template jar file + Jar dataTemplateJarTask = project.getTasks() + .create(sourceSet.getName() + "DataTemplateJar", Jar.class, task -> + { + task.dependsOn(dataTemplateJarDepends); + task.from(targetSourceSet.getOutput()); + + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "data-template")); + task.setDescription("Generate a data template jar"); + }); + + // add the data model and date template jars to the list of project artifacts. + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("dataTemplate", dataTemplateJarTask); + } + else + { + project.getArtifacts().add("testDataTemplate", dataTemplateJarTask); + } + + // include additional dependencies into the appropriate configuration used to compile the input source set + // must include the generated data template classes and their dependencies the configuration. + // "compile" and "testCompile" configurations have been removed in Gradle 7, + // but to keep the maximum backward compatibility, here we handle Gradle 7 and earlier version differently + // Once MIN_REQUIRED_VERSION reaches 7.0, we can remove the check of isAtLeastGradle7() + String compileConfigName; + if (isAtLeastGradle7()) { + compileConfigName = isTestSourceSet(sourceSet) ? "testImplementation" : project.getConfigurations().findByName("api") != null ? "api" : "implementation"; + } + else + { + compileConfigName = isTestSourceSet(sourceSet) ? 
"testCompile" : "compile"; + } + + Configuration compileConfig = project.getConfigurations().maybeCreate(compileConfigName); + compileConfig.extendsFrom( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("dataTemplateCompile")); + + // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. + // Replace it with getArchiveFile() on Gradle 7, + // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 + // DataHub Note - applied FIXME + project.getDependencies().add(compileConfigName, project.files( + isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); + + if (_configureIvyPublications) { + // The below Action is only applied when the 'ivy-publish' is applied by the consumer. + // If the consumer does not use ivy-publish, this is a noop. + // this Action prepares the project applying the pegasus plugin to publish artifacts using these steps: + // 1. Registers "feature variants" for pegasus-specific artifacts; + // see https://docs.gradle.org/6.1/userguide/feature_variants.html + // 2. Wires legacy configurations like `dataTemplateCompile` to auto-generated feature variant *Api and + // *Implementation configurations for backwards compatibility. + // 3. Configures the Ivy Publication to include auto-generated feature variant *Api and *Implementation + // configurations and their dependencies. + project.getPlugins().withType(IvyPublishPlugin.class, ivyPublish -> { + if (!isAtLeastGradle61()) + { + throw new GradleException("Using the ivy-publish plugin with the pegasus plugin requires Gradle 6.1 or higher " + + "at build time. 
Please upgrade."); + } + + JavaPluginExtension java = project.getExtensions().getByType(JavaPluginExtension.class); + // create new capabilities per source set; automatically creates api and implementation configurations + String featureName = mapSourceSetToFeatureName(targetSourceSet); + try + { + /* + reflection is required to preserve compatibility with Gradle 5.2.1 and below + TODO once Gradle 5.3+ is required, remove reflection and replace with: + java.registerFeature(featureName, featureSpec -> { + featureSpec.usingSourceSet(targetSourceSet); + }); + */ + Method registerFeature = JavaPluginExtension.class.getDeclaredMethod("registerFeature", String.class, Action.class); + Action/**/ featureSpecAction = createFeatureVariantFromSourceSet(targetSourceSet); + registerFeature.invoke(java, featureName, featureSpecAction); + } + catch (ReflectiveOperationException e) + { + throw new GradleException("Unable to register new feature variant", e); + } + + // expose transitive dependencies to consumers via variant configurations + Configuration featureConfiguration = project.getConfigurations().getByName(featureName); + Configuration mainGeneratedDataTemplateApi = project.getConfigurations().getByName(targetSourceSet.getApiConfigurationName()); + featureConfiguration.extendsFrom(mainGeneratedDataTemplateApi); + mainGeneratedDataTemplateApi.extendsFrom( + getDataModelConfig(project, targetSourceSet), + project.getConfigurations().getByName("dataTemplateCompile")); + + // Configure the existing IvyPublication + // For backwards-compatibility, make the legacy dataTemplate/testDataTemplate configurations extend + // their replacements, auto-created when we registered the new feature variant + project.afterEvaluate(p -> { + PublishingExtension publishing = p.getExtensions().getByType(PublishingExtension.class); + // When configuring a Gradle Publication, use this value to find the name of the publication to configure. Defaults to "ivy". 
+ String publicationName = p.getExtensions().getExtraProperties().getProperties().getOrDefault("PegasusPublicationName", "ivy").toString(); + IvyPublication ivyPublication = publishing.getPublications().withType(IvyPublication.class).getByName(publicationName); + ivyPublication.configurations(configurations -> configurations.create(featureName, legacyConfiguration -> { + legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getApiElementsConfigurationName()).getName()); + legacyConfiguration.extend(p.getConfigurations().getByName(targetSourceSet.getRuntimeElementsConfigurationName()).getName()); + })); + }); + }); + } + + if (debug) + { + System.out.println("configureDataTemplateGeneration sourceSet " + sourceSet.getName()); + System.out.println(compileConfigName + ".allDependencies : " + + project.getConfigurations().getByName(compileConfigName).getAllDependencies()); + System.out.println(compileConfigName + ".extendsFrom: " + + project.getConfigurations().getByName(compileConfigName).getExtendsFrom()); + System.out.println(compileConfigName + ".transitive: " + + project.getConfigurations().getByName(compileConfigName).isTransitive()); + } + + project.getTasks().getByName(sourceSet.getCompileJavaTaskName()).dependsOn(dataTemplateJarTask); + return generateDataTemplatesTask; + } + + private String mapSourceSetToFeatureName(SourceSet sourceSet) { + String featureName = ""; + switch (sourceSet.getName()) { + case "mainGeneratedDataTemplate": + featureName = "dataTemplate"; + break; + case "testGeneratedDataTemplate": + featureName = "testDataTemplate"; + break; + case "mainGeneratedRest": + featureName = "restClient"; + break; + case "testGeneratedRest": + featureName = "testRestClient"; + break; + case "mainGeneratedAvroSchema": + featureName = "avroSchema"; + break; + case "testGeneratedAvroSchema": + featureName = "testAvroSchema"; + break; + default: + String msg = String.format("Unable to map %s to an appropriate feature name", sourceSet); + 
throw new GradleException(msg); + } + return featureName; + } + + // Generate rest client from idl files generated from java source files in the specified source set. + // + // This generates rest client source files from idl file generated from java source files + // in the source set. The generated rest client source files will be in a new source set. + // It also compiles the rest client source files into classes, and creates both the + // rest model and rest client jar files. + // + @SuppressWarnings("deprecation") + protected void configureRestClientGeneration(Project project, SourceSet sourceSet) + { + // idl directory for api project + File idlDir = project.file(getIdlPath(project, sourceSet)); + if (SharedFileUtils.getSuffixedFiles(project, idlDir, IDL_FILE_SUFFIX).isEmpty() && !isPropertyTrue(project, + PROCESS_EMPTY_IDL_DIR)) + { + return; + } + File generatedRestClientDir = project.file(getGeneratedDirPath(project, sourceSet, REST_GEN_TYPE) + + File.separatorChar + "java"); + + // always include imported data template jars in compileClasspath of rest client + FileCollection dataModelConfig = getDataModelConfig(project, sourceSet); + + // if data templates generated from this source set, add the generated data template jar to compileClasspath + // of rest client. + String dataTemplateSourceSetName = getGeneratedSourceSetName(sourceSet, DATA_TEMPLATE_GEN_TYPE); + + Jar dataTemplateJarTask = null; + + SourceSetContainer sourceSets = project.getConvention() + .getPlugin(JavaPluginConvention.class).getSourceSets(); + + FileCollection dataModels; + if (sourceSets.findByName(dataTemplateSourceSetName) != null) + { + if (debug) + { + System.out.println("sourceSet " + sourceSet.getName() + " has generated sourceSet " + dataTemplateSourceSetName); + } + dataTemplateJarTask = (Jar) project.getTasks().getByName(sourceSet.getName() + "DataTemplateJar"); + // The getArchivePath() API doesn’t carry any task dependency and has been deprecated. 
+ // Replace it with getArchiveFile() on Gradle 7, + // but keep getArchivePath() to be backwards-compatibility with Gradle version older than 5.1 + // DataHub Note - applied FIXME + dataModels = dataModelConfig.plus(project.files( + isAtLeastGradle7() ? dataTemplateJarTask.getArchiveFile() : dataTemplateJarTask.getArchivePath())); + } + else + { + dataModels = dataModelConfig; + } + + // create source set for generated rest model, rest client source and class files. + String targetSourceSetName = getGeneratedSourceSetName(sourceSet, REST_GEN_TYPE); + SourceSet targetSourceSet = sourceSets.create(targetSourceSetName, ss -> + { + ss.java(sourceDirectorySet -> sourceDirectorySet.srcDir(generatedRestClientDir)); + ss.setCompileClasspath(dataModels.plus(project.getConfigurations().getByName("restClientCompile"))); + }); + + project.getPlugins().withType(EclipsePlugin.class, eclipsePlugin -> { + EclipseModel eclipseModel = (EclipseModel) project.getExtensions().findByName("eclipse"); + eclipseModel.getClasspath().getPlusConfigurations() + .add(project.getConfigurations().getByName("restClientCompile")); + }); + + // idea plugin needs to know about new rest client source directory and its dependencies + addGeneratedDir(project, targetSourceSet, Arrays.asList( + getDataModelConfig(project, sourceSet), + project.getConfigurations().getByName("restClientCompile"))); + + // generate the rest client source files + GenerateRestClientTask generateRestClientTask = project.getTasks() + .create(targetSourceSet.getTaskName("generate", "restClient"), GenerateRestClientTask.class, task -> + { + task.dependsOn(project.getConfigurations().getByName("dataTemplate")); + task.setInputDir(idlDir); + task.setResolverPath(dataModels.plus(project.getConfigurations().getByName("restClientCompile"))); + task.setRuntimeClasspath(project.getConfigurations().getByName("dataModel") + .plus(project.getConfigurations().getByName("dataTemplate").getArtifacts().getFiles())); + 
task.setCodegenClasspath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setDestinationDir(generatedRestClientDir); + task.setRestli2FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_2)); + task.setRestli1FormatSuppressed(project.hasProperty(SUPPRESS_REST_CLIENT_RESTLI_1)); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + if (isPropertyTrue(project, CODE_GEN_PATH_CASE_SENSITIVE)) + { + task.setGenerateLowercasePath(false); + } + if (isPropertyTrue(project, ENABLE_FLUENT_API)) + { + task.setGenerateFluentApi(true); + } + task.doFirst(new CacheableAction<>(t -> project.delete(generatedRestClientDir))); + }); + + if (dataTemplateJarTask != null) + { + generateRestClientTask.dependsOn(dataTemplateJarTask); + } + + // TODO: Tighten the types so that _generateSourcesJarTask must be of type Jar. + ((Jar) _generateSourcesJarTask).from(generateRestClientTask.getDestinationDir()); + _generateSourcesJarTask.dependsOn(generateRestClientTask); + + _generateJavadocTask.source(generateRestClientTask.getDestinationDir()); + _generateJavadocTask.setClasspath(_generateJavadocTask.getClasspath() + .plus(project.getConfigurations().getByName("restClientCompile")) + .plus(generateRestClientTask.getResolverPath())); + _generateJavadocTask.dependsOn(generateRestClientTask); + + // make sure rest client source files have been generated before compiling them + JavaCompile compileGeneratedRestClientTask = (JavaCompile) project.getTasks() + .getByName(targetSourceSet.getCompileJavaTaskName()); + compileGeneratedRestClientTask.dependsOn(generateRestClientTask); + compileGeneratedRestClientTask.getOptions().getCompilerArgs().add("-Xlint:-deprecation"); + + // create the rest model jar file + Task restModelJarTask = project.getTasks().create(sourceSet.getName() + "RestModelJar", Jar.class, task -> + { + task.from(idlDir, copySpec -> + { + copySpec.eachFile(fileCopyDetails -> project.getLogger() + .info("Add idl 
file: {}", fileCopyDetails)); + copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); + }); + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-model")); + task.setDescription("Generate rest model jar"); + }); + + // create the rest client jar file + Task restClientJarTask = project.getTasks() + .create(sourceSet.getName() + "RestClientJar", Jar.class, task -> + { + task.dependsOn(compileGeneratedRestClientTask); + task.from(idlDir, copySpec -> { + copySpec.eachFile(fileCopyDetails -> { + project.getLogger().info("Add interface file: {}", fileCopyDetails); + fileCopyDetails.setPath("idl" + File.separatorChar + fileCopyDetails.getPath()); + }); + copySpec.setIncludes(Collections.singletonList('*' + IDL_FILE_SUFFIX)); + }); + task.from(targetSourceSet.getOutput()); + // FIXME change to #getArchiveAppendix().set(...); breaks backwards-compatibility before 5.1 + // DataHub Note - applied FIXME + task.getArchiveAppendix().set(getAppendix(sourceSet, "rest-client")); + task.setDescription("Generate rest client jar"); + }); + + // add the rest model jar and the rest client jar to the list of project artifacts. + if (!isTestSourceSet(sourceSet)) + { + project.getArtifacts().add("restModel", restModelJarTask); + project.getArtifacts().add("restClient", restClientJarTask); + } + else + { + project.getArtifacts().add("testRestModel", restModelJarTask); + project.getArtifacts().add("testRestClient", restClientJarTask); + } + } + + // Return the appendix for generated jar files. + // The source set name is not included for the main source set. + private static String getAppendix(SourceSet sourceSet, String suffix) + { + return sourceSet.getName().equals("main") ? 
suffix : sourceSet.getName() + '-' + suffix; + } + + private static Project getApiProject(Project project) + { + if (project.getExtensions().getExtraProperties().has("apiProject")) + { + return (Project) project.getExtensions().getExtraProperties().get("apiProject"); + } + + List subsSuffixes; + if (project.getExtensions().getExtraProperties().has("apiProjectSubstitutionSuffixes")) + { + @SuppressWarnings("unchecked") + List suffixValue = (List) project.getExtensions() + .getExtraProperties().get("apiProjectSubstitutionSuffixes"); + + subsSuffixes = suffixValue; + } + else + { + subsSuffixes = Arrays.asList("-impl", "-service", "-server", "-server-impl"); + } + + for (String suffix : subsSuffixes) + { + if (project.getPath().endsWith(suffix)) + { + String searchPath = project.getPath().substring(0, project.getPath().length() - suffix.length()) + "-api"; + Project apiProject = project.findProject(searchPath); + if (apiProject != null) + { + return apiProject; + } + } + } + + return project.findProject(project.getPath() + "-api"); + } + + private static Project getCheckedApiProject(Project project) + { + Project apiProject = getApiProject(project); + + if (apiProject == project) + { + throw new GradleException("The API project of ${project.path} must not be itself."); + } + + return apiProject; + } + + /** + * return the property value if the property exists and is not empty (-Pname=value) + * return null if property does not exist or the property is empty (-Pname) + * + * @param project the project where to look for the property + * @param propertyName the name of the property + */ + public static String getNonEmptyProperty(Project project, String propertyName) + { + if (!project.hasProperty(propertyName)) + { + return null; + } + + String propertyValue = project.property(propertyName).toString(); + if (propertyValue.isEmpty()) + { + return null; + } + + return propertyValue; + } + + /** + * Return true if the given property exists and its value is true + * + * 
@param project the project where to look for the property + * @param propertyName the name of the property + */ + public static boolean isPropertyTrue(Project project, String propertyName) + { + return project.hasProperty(propertyName) && Boolean.valueOf(project.property(propertyName).toString()); + } + + private static String createModifiedFilesMessage(Collection nonEquivExpectedFiles, + Collection foldersToBeBuilt) + { + StringBuilder builder = new StringBuilder(); + builder.append("\nRemember to checkin the changes to the following new or modified files:\n"); + for (String file : nonEquivExpectedFiles) + { + builder.append(" "); + builder.append(file); + builder.append("\n"); + } + + if (!foldersToBeBuilt.isEmpty()) + { + builder.append("\nThe file modifications include service interface changes, you can build the the following projects " + + "to re-generate the client APIs accordingly:\n"); + for (String folder : foldersToBeBuilt) + { + builder.append(" "); + builder.append(folder); + builder.append("\n"); + } + } + + return builder.toString(); + } + + private static String createPossibleMissingFilesMessage(Collection missingFiles) + { + StringBuilder builder = new StringBuilder(); + builder.append("If this is the result of an automated build, then you may have forgotten to check in some snapshot or idl files:\n"); + for (String file : missingFiles) + { + builder.append(" "); + builder.append(file); + builder.append("\n"); + } + + return builder.toString(); + } + + private static String findProperty(FileCompatibilityType type) + { + String property; + switch (type) + { + case SNAPSHOT: + property = SNAPSHOT_COMPAT_REQUIREMENT; + break; + case IDL: + property = IDL_COMPAT_REQUIREMENT; + break; + case PEGASUS_SCHEMA_SNAPSHOT: + property = PEGASUS_SCHEMA_SNAPSHOT_REQUIREMENT; + break; + case PEGASUS_EXTENSION_SCHEMA_SNAPSHOT: + property = PEGASUS_EXTENSION_SCHEMA_SNAPSHOT_REQUIREMENT; + break; + default: + throw new GradleException("No property defined for 
compatibility type " + type); + } + return property; + } + + private static Set buildWatchedRestModelInputDirs(Project project, SourceSet sourceSet) { + @SuppressWarnings("unchecked") + Map pegasusOptions = (Map) project + .getExtensions().getExtraProperties().get("pegasus"); + + File rootPath = new File(project.getProjectDir(), + pegasusOptions.get(sourceSet.getName()).restModelOptions.getRestResourcesRootPath()); + + IdlOptions idlOptions = pegasusOptions.get(sourceSet.getName()).idlOptions; + + // if idlItems exist, only watch the smaller subset + return idlOptions.getIdlItems().stream() + .flatMap(idlItem -> Arrays.stream(idlItem.packageNames)) + .map(packageName -> new File(rootPath, packageName.replace('.', '/'))) + .collect(Collectors.toCollection(TreeSet::new)); + } + + private static Set difference(Set left, Set right) + { + Set result = new HashSet<>(left); + result.removeAll(right); + return result; + } + + /** + * Configures the given source set so that its data schema directory (usually 'pegasus') is marked as a resource root. + * The purpose of this is to improve the IDE experience. Makes sure to exclude this directory from being packaged in + * with the default Jar task. 
+ */ + private static void configureDataSchemaResourcesRoot(Project project, SourceSet sourceSet) + { + sourceSet.resources(sourceDirectorySet -> { + final String dataSchemaPath = getDataSchemaPath(project, sourceSet); + final File dataSchemaRoot = project.file(dataSchemaPath); + sourceDirectorySet.srcDir(dataSchemaPath); + project.getLogger().info("Adding resource root '{}'", dataSchemaPath); + + final String extensionsSchemaPath = getExtensionSchemaPath(project, sourceSet); + final File extensionsSchemaRoot = project.file(extensionsSchemaPath); + sourceDirectorySet.srcDir(extensionsSchemaPath); + project.getLogger().info("Adding resource root '{}'", extensionsSchemaPath); + + // Exclude the data schema and extensions schema directory from being copied into the default Jar task + sourceDirectorySet.getFilter().exclude(fileTreeElement -> { + final File file = fileTreeElement.getFile(); + // Traversal starts with the children of a resource root, so checking the direct parent is sufficient + final boolean underDataSchemaRoot = dataSchemaRoot.equals(file.getParentFile()); + final boolean underExtensionsSchemaRoot = extensionsSchemaRoot.equals(file.getParentFile()); + final boolean exclude = (underDataSchemaRoot || underExtensionsSchemaRoot); + if (exclude) + { + project.getLogger().info("Excluding resource directory '{}'", file); + } + return exclude; + }); + }); + } + + private Task generatePegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, File inputDir, File outputDir, + boolean isExtensionSchema) + { + return project.getTasks().create(sourceSet.getTaskName("generate", taskName), + GeneratePegasusSnapshotTask.class, task -> + { + task.setInputDir(inputDir); + task.setResolverPath(getDataModelConfig(project, sourceSet).plus(project.files(getDataSchemaPath(project, sourceSet)))); + task.setClassPath(project.getConfigurations().getByName(PEGASUS_PLUGIN_CONFIGURATION)); + task.setPegasusSchemaSnapshotDestinationDir(outputDir); + 
task.setExtensionSchema(isExtensionSchema); + if (isPropertyTrue(project, ENABLE_ARG_FILE)) + { + task.setEnableArgFile(true); + } + }); + } + + private Task publishPegasusSchemaSnapshot(Project project, SourceSet sourceSet, String taskName, Task checkPegasusSnapshotTask, + File inputDir, File outputDir) + { + return project.getTasks().create(sourceSet.getTaskName("publish", taskName), + Sync.class, task -> + { + task.dependsOn(checkPegasusSnapshotTask); + task.from(inputDir); + task.into(outputDir); + task.onlyIf(t -> !SharedFileUtils.getSuffixedFiles(project, inputDir, PDL_FILE_SUFFIX).isEmpty()); + }); + } + + private void checkGradleVersion(Project project) + { + if (MIN_REQUIRED_VERSION.compareTo(GradleVersion.current()) > 0) + { + throw new GradleException(String.format("This plugin does not support %s. Please use %s or later.", + GradleVersion.current(), + MIN_REQUIRED_VERSION)); + } + if (MIN_SUGGESTED_VERSION.compareTo(GradleVersion.current()) > 0) + { + project.getLogger().warn(String.format("Pegasus supports %s, but it may not be supported in the next major release. 
Please use %s or later.", + GradleVersion.current(), + MIN_SUGGESTED_VERSION)); + } + } + + /** + * Reflection is necessary to obscure types introduced in Gradle 5.3 + * + * @param sourceSet the target sourceset upon which to create a new feature variant + * @return an Action which modifies a org.gradle.api.plugins.FeatureSpec instance + */ + private Action/**/ createFeatureVariantFromSourceSet(SourceSet sourceSet) + { + return featureSpec -> { + try + { + Class clazz = Class.forName("org.gradle.api.plugins.FeatureSpec"); + Method usingSourceSet = clazz.getDeclaredMethod("usingSourceSet", SourceSet.class); + usingSourceSet.invoke(featureSpec, sourceSet); + } + catch (ReflectiveOperationException e) + { + throw new GradleException("Unable to invoke FeatureSpec#usingSourceSet(SourceSet)", e); + } + }; + } + + protected static boolean isAtLeastGradle61() + { + return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("6.1")) >= 0; + } + + public static boolean isAtLeastGradle7() { + return GradleVersion.current().getBaseVersion().compareTo(GradleVersion.version("7.0")) >= 0; + } +} \ No newline at end of file diff --git a/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java new file mode 100644 index 00000000000000..a2aafaf1be0172 --- /dev/null +++ b/buildSrc/src/main/java/com/linkedin/pegasus/gradle/tasks/ChangedFileReportTask.java @@ -0,0 +1,124 @@ +package com.linkedin.pegasus.gradle.tasks; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Consumer; +import java.util.stream.Collectors; +import org.gradle.api.DefaultTask; +import org.gradle.api.file.FileCollection; +import org.gradle.api.specs.Specs; +import org.gradle.api.tasks.InputFiles; +import org.gradle.api.tasks.Internal; +import org.gradle.api.tasks.SkipWhenEmpty; +import 
org.gradle.api.tasks.TaskAction; +import org.gradle.work.FileChange; +import org.gradle.work.InputChanges; + + +public class ChangedFileReportTask extends DefaultTask +{ + private final Collection _needCheckinFiles = new ArrayList<>(); + + private FileCollection _idlFiles = getProject().files(); + private FileCollection _snapshotFiles = getProject().files(); + + public ChangedFileReportTask() + { + //with Gradle 6.0, Declaring an incremental task without outputs is not allowed. + getOutputs().upToDateWhen(Specs.satisfyNone()); + } + + // DataHub Note - updated for InputChanges + @TaskAction + public void checkFilesForChanges(InputChanges inputChanges) + { + getLogger().lifecycle("Checking idl and snapshot files for changes..."); + getLogger().info("idlFiles: " + _idlFiles.getAsPath()); + getLogger().info("snapshotFiles: " + _snapshotFiles.getAsPath()); + + Set filesRemoved = new HashSet<>(); + Set filesAdded = new HashSet<>(); + Set filesChanged = new HashSet<>(); + + if (inputChanges.isIncremental()) + { + Consumer handleChange = change -> + { + switch (change.getChangeType()) { + case ADDED: + filesAdded.add(change.getFile().getAbsolutePath()); + break; + case REMOVED: + filesRemoved.add(change.getFile().getAbsolutePath()); + break; + case MODIFIED: + filesChanged.add(change.getFile().getAbsolutePath()); + break; + } + }; + + inputChanges.getFileChanges(_idlFiles).forEach(handleChange); + inputChanges.getFileChanges(_snapshotFiles).forEach(handleChange); + + if (!filesRemoved.isEmpty()) + { + String files = joinByComma(filesRemoved); + _needCheckinFiles.add(files); + getLogger().lifecycle( + "The following files have been removed, be sure to remove them from source control: {}", files); + } + + if (!filesAdded.isEmpty()) + { + String files = joinByComma(filesAdded); + _needCheckinFiles.add(files); + getLogger().lifecycle("The following files have been added, be sure to add them to source control: {}", files); + } + + if (!filesChanged.isEmpty()) + { + String 
files = joinByComma(filesChanged); + _needCheckinFiles.add(files); + getLogger().lifecycle( + "The following files have been changed, be sure to commit the changes to source control: {}", files); + } + } + } + + private String joinByComma(Set files) + { + return files.stream().collect(Collectors.joining(", ")); + } + + @InputFiles + @SkipWhenEmpty + public FileCollection getSnapshotFiles() + { + return _snapshotFiles; + } + + public void setSnapshotFiles(FileCollection snapshotFiles) + { + _snapshotFiles = snapshotFiles; + } + + @InputFiles + @SkipWhenEmpty + public FileCollection getIdlFiles() + { + return _idlFiles; + } + + public void setIdlFiles(FileCollection idlFiles) + { + _idlFiles = idlFiles; + } + + @Internal + public Collection getNeedCheckinFiles() + { + return _needCheckinFiles; + } +} \ No newline at end of file diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index fe04c3629fe582..ef33bde8f61d39 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -1,5 +1,9 @@ package auth; +import static auth.AuthUtils.*; +import static auth.sso.oidc.OidcConfigs.*; +import static utils.ConfigUtil.*; + import auth.sso.SsoConfigs; import auth.sso.SsoManager; import auth.sso.oidc.OidcConfigs; @@ -18,12 +22,10 @@ import com.linkedin.util.Configuration; import config.ConfigurationProvider; import controllers.SsoCallbackController; - import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.List; - import org.apache.commons.codec.digest.DigestUtils; import org.apache.http.impl.client.CloseableHttpClient; import org.apache.http.impl.client.HttpClients; @@ -42,205 +44,227 @@ import play.cache.SyncCacheApi; import utils.ConfigUtil; -import static auth.AuthUtils.*; -import static auth.sso.oidc.OidcConfigs.*; -import static utils.ConfigUtil.*; +/** Responsible for configuring, validating, and providing authentication 
related components. */ +public class AuthModule extends AbstractModule { + /** + * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration + * value provides a stable encryption base from which to derive the encryption key. + * + *

We hash this value (SHA256), then take the first 16 bytes as the AES key. + */ + private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; -/** - * Responsible for configuring, validating, and providing authentication related components. - */ -public class AuthModule extends AbstractModule { + private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; + private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; + private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + + private final com.typesafe.config.Config _configs; + + public AuthModule(final Environment environment, final com.typesafe.config.Config configs) { + _configs = configs; + } + @Override + protected void configure() { /** - * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration - * value provides a stable encryption base from which to derive the encryption key. - * - * We hash this value (SHA256), then take the first 16 bytes as the AES key. + * In Pac4J, you are given the option to store the profiles of authenticated users in either (i) + * PlayCacheSessionStore - saves your data in the Play cache or (ii) PlayCookieSessionStore + * saves your data in the Play session cookie However there is problem + * (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j + * profile in cookie. Whenever the profile returned by Pac4j is greater than 4096 characters, + * the response will be rejected by the browser. 
Default to PlayCacheCookieStore so that + * datahub-frontend container remains as a stateless service */ - private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key"; - private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider"; - private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval"; - private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries"; + String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - private final com.typesafe.config.Config _configs; - - public AuthModule(final Environment environment, final com.typesafe.config.Config configs) { - _configs = configs; + if (sessionStoreProvider.equals("PlayCacheSessionStore")) { + final PlayCacheSessionStore playCacheSessionStore = + new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); + bind(SessionStore.class).toInstance(playCacheSessionStore); + bind(PlaySessionStore.class).toInstance(playCacheSessionStore); + } else { + PlayCookieSessionStore playCacheCookieStore; + try { + // To generate a valid encryption key from an input value, we first + // hash the input to generate a fixed-length string. Then, we convert + // it to hex and slice the first 16 bytes, because AES key length must strictly + // have a specific length. 
+ final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); + final String aesKeyHash = + DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); + final String aesEncryptionKey = aesKeyHash.substring(0, 16); + playCacheCookieStore = + new PlayCookieSessionStore(new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); + } catch (Exception e) { + throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); + } + bind(SessionStore.class).toInstance(playCacheCookieStore); + bind(PlaySessionStore.class).toInstance(playCacheCookieStore); } - @Override - protected void configure() { - /** - * In Pac4J, you are given the option to store the profiles of authenticated users in either - * (i) PlayCacheSessionStore - saves your data in the Play cache or - * (ii) PlayCookieSessionStore saves your data in the Play session cookie - * However there is problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j profile in cookie. - * Whenever the profile returned by Pac4j is greater than 4096 characters, the response will be rejected by the browser. - * Default to PlayCacheCookieStore so that datahub-frontend container remains as a stateless service - */ - String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF); - - if (sessionStoreProvider.equals("PlayCacheSessionStore")) { - final PlayCacheSessionStore playCacheSessionStore = new PlayCacheSessionStore(getProvider(SyncCacheApi.class)); - bind(SessionStore.class).toInstance(playCacheSessionStore); - bind(PlaySessionStore.class).toInstance(playCacheSessionStore); - } else { - PlayCookieSessionStore playCacheCookieStore; - try { - // To generate a valid encryption key from an input value, we first - // hash the input to generate a fixed-length string. Then, we convert - // it to hex and slice the first 16 bytes, because AES key length must strictly - // have a specific length. 
- final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF); - final String aesKeyHash = DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)); - final String aesEncryptionKey = aesKeyHash.substring(0, 16); - playCacheCookieStore = new PlayCookieSessionStore( - new ShiroAesDataEncrypter(aesEncryptionKey.getBytes())); - } catch (Exception e) { - throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e); - } - bind(SessionStore.class).toInstance(playCacheCookieStore); - bind(PlaySessionStore.class).toInstance(playCacheCookieStore); - } - - try { - bind(SsoCallbackController.class).toConstructor(SsoCallbackController.class.getConstructor( - SsoManager.class, - Authentication.class, - SystemEntityClient.class, - AuthServiceClient.class, - com.typesafe.config.Config.class)); - } catch (NoSuchMethodException | SecurityException e) { - throw new RuntimeException("Failed to bind to SsoCallbackController. Cannot find constructor", e); - } - // logout - final LogoutController logoutController = new LogoutController(); - logoutController.setDefaultUrl("/"); - bind(LogoutController.class).toInstance(logoutController); + try { + bind(SsoCallbackController.class) + .toConstructor( + SsoCallbackController.class.getConstructor( + SsoManager.class, + Authentication.class, + SystemEntityClient.class, + AuthServiceClient.class, + com.typesafe.config.Config.class)); + } catch (NoSuchMethodException | SecurityException e) { + throw new RuntimeException( + "Failed to bind to SsoCallbackController. 
Cannot find constructor", e); } + // logout + final LogoutController logoutController = new LogoutController(); + logoutController.setDefaultUrl("/"); + bind(LogoutController.class).toInstance(logoutController); + } - @Provides @Singleton - protected Config provideConfig(SsoManager ssoManager) { - if (ssoManager.isSsoEnabled()) { - final Clients clients = new Clients(); - final List clientList = new ArrayList<>(); - clientList.add(ssoManager.getSsoProvider().client()); - clients.setClients(clientList); - final Config config = new Config(clients); - config.setHttpActionAdapter(new PlayHttpActionAdapter()); - return config; - } - return new Config(); + @Provides + @Singleton + protected Config provideConfig(SsoManager ssoManager) { + if (ssoManager.isSsoEnabled()) { + final Clients clients = new Clients(); + final List clientList = new ArrayList<>(); + clientList.add(ssoManager.getSsoProvider().client()); + clients.setClients(clientList); + final Config config = new Config(clients); + config.setHttpActionAdapter(new PlayHttpActionAdapter()); + return config; } + return new Config(); + } - @Provides @Singleton - protected SsoManager provideSsoManager() { - SsoManager manager = new SsoManager(); - // Seed the SSO manager with a default SSO provider. - if (isSsoEnabled(_configs)) { - SsoConfigs ssoConfigs = new SsoConfigs(_configs); - if (ssoConfigs.isOidcEnabled()) { - // Register OIDC Provider, add to list of managers. - OidcConfigs oidcConfigs = new OidcConfigs(_configs); - OidcProvider oidcProvider = new OidcProvider(oidcConfigs); - // Set the default SSO provider to this OIDC client. - manager.setSsoProvider(oidcProvider); - } - } - return manager; + @Provides + @Singleton + protected SsoManager provideSsoManager() { + SsoManager manager = new SsoManager(); + // Seed the SSO manager with a default SSO provider. 
+ if (isSsoEnabled(_configs)) { + SsoConfigs ssoConfigs = new SsoConfigs(_configs); + if (ssoConfigs.isOidcEnabled()) { + // Register OIDC Provider, add to list of managers. + OidcConfigs oidcConfigs = new OidcConfigs(_configs); + OidcProvider oidcProvider = new OidcProvider(oidcConfigs); + // Set the default SSO provider to this OIDC client. + manager.setSsoProvider(oidcProvider); + } } + return manager; + } - @Provides - @Singleton - protected Authentication provideSystemAuthentication() { - // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service. - String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); - String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); - final Actor systemActor = - new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. - return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret), - Collections.emptyMap()); - } + @Provides + @Singleton + protected Authentication provideSystemAuthentication() { + // Returns an instance of Authentication used to authenticate system initiated calls to Metadata + // Service. + String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); + String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); + final Actor systemActor = + new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. 
+ return new Authentication( + systemActor, + String.format("Basic %s:%s", systemClientId, systemSecret), + Collections.emptyMap()); + } - @Provides - @Singleton - protected ConfigurationProvider provideConfigurationProvider() { - AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ConfigurationProvider.class); - return context.getBean(ConfigurationProvider.class); - } + @Provides + @Singleton + protected ConfigurationProvider provideConfigurationProvider() { + AnnotationConfigApplicationContext context = + new AnnotationConfigApplicationContext(ConfigurationProvider.class); + return context.getBean(ConfigurationProvider.class); + } - @Provides - @Singleton - protected SystemEntityClient provideEntityClient(final Authentication systemAuthentication, - final ConfigurationProvider configurationProvider) { - return new SystemRestliEntityClient(buildRestliClient(), - new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), - _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), systemAuthentication, - configurationProvider.getCache().getClient().getEntityClient()); - } + @Provides + @Singleton + protected SystemEntityClient provideEntityClient( + final Authentication systemAuthentication, + final ConfigurationProvider configurationProvider) { + return new SystemRestliEntityClient( + buildRestliClient(), + new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)), + _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), + systemAuthentication, + configurationProvider.getCache().getClient().getEntityClient()); + } - @Provides - @Singleton - protected CloseableHttpClient provideHttpClient() { - return HttpClients.createDefault(); - } + @Provides + @Singleton + protected CloseableHttpClient provideHttpClient() { + return HttpClients.createDefault(); + } - @Provides - @Singleton - protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) { - // Init a GMS auth client - final String 
metadataServiceHost = - _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) - : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); - - final int metadataServicePort = - _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) - : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - - final Boolean metadataServiceUseSsl = - _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? _configs.getBoolean( - METADATA_SERVICE_USE_SSL_CONFIG_PATH) - : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); - - return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, - systemAuthentication, httpClient); - } + @Provides + @Singleton + protected AuthServiceClient provideAuthClient( + Authentication systemAuthentication, CloseableHttpClient httpClient) { + // Init a GMS auth client + final String metadataServiceHost = + _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) + ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) + : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); + + final int metadataServicePort = + _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) + ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) + : Integer.parseInt( + Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - private com.linkedin.restli.client.Client buildRestliClient() { - final String metadataServiceHost = utils.ConfigUtil.getString( + final Boolean metadataServiceUseSsl = + _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + ? 
_configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) + : Boolean.parseBoolean( + Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + + return new AuthServiceClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + systemAuthentication, + httpClient); + } + + private com.linkedin.restli.client.Client buildRestliClient() { + final String metadataServiceHost = + utils.ConfigUtil.getString( _configs, METADATA_SERVICE_HOST_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = utils.ConfigUtil.getInt( + final int metadataServicePort = + utils.ConfigUtil.getInt( _configs, utils.ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, utils.ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = utils.ConfigUtil.getBoolean( + final boolean metadataServiceUseSsl = + utils.ConfigUtil.getBoolean( _configs, utils.ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); - final String metadataServiceSslProtocol = utils.ConfigUtil.getString( + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); + final String metadataServiceSslProtocol = + utils.ConfigUtil.getString( _configs, utils.ConfigUtil.METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL - ); - return DefaultRestliClientFactory.getRestLiClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, metadataServiceSslProtocol); - } + ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL); + return DefaultRestliClientFactory.getRestLiClient( + metadataServiceHost, + metadataServicePort, + metadataServiceUseSsl, + metadataServiceSslProtocol); + } - protected boolean isSsoEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. 
- return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); - } + protected boolean isSsoEnabled(com.typesafe.config.Config configs) { + // If OIDC is enabled, we infer SSO to be enabled. + return configs.hasPath(OIDC_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + } - protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { - // If OIDC is enabled, we infer SSO to be enabled. - return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( + protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) { + // If OIDC is enabled, we infer SSO to be enabled. + return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals( Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH))); - } + } } - diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java index 386eee725c83d0..283a2164584b95 100644 --- a/datahub-frontend/app/auth/AuthUtils.java +++ b/datahub-frontend/app/auth/AuthUtils.java @@ -1,137 +1,136 @@ package auth; import com.linkedin.common.urn.CorpuserUrn; -import lombok.extern.slf4j.Slf4j; -import play.mvc.Http; - -import javax.annotation.Nonnull; import java.time.Duration; import java.time.temporal.ChronoUnit; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; +import play.mvc.Http; @Slf4j public class AuthUtils { - /** - * The config path that determines whether Metadata Service Authentication is enabled. - * - * When enabled, the frontend server will proxy requests to the Metadata Service without requiring them to have a valid - * frontend-issued Session Cookie. This effectively means delegating the act of authentication to the Metadata Service. 
It - * is critical that if Metadata Service authentication is enabled at the frontend service layer, it is also enabled in the - * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. - * - * When disabled, the frontend server will require that all requests have a valid Session Cookie associated with them. Otherwise, - * requests will be denied with an Unauthorized error. - */ - public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = "metadataService.auth.enabled"; - - /** - * The attribute inside session cookie representing a GMS-issued access token - */ - public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; - - /** - * An ID used to identify system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; - - /** - * An Secret used to authenticate system callers that are internal to DataHub. Provided via configuration. - */ - public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; - - /** - * Cookie name for redirect url that is manually separated from the session to reduce size - */ - public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; - - public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); - - public static final String LOGIN_ROUTE = "/login"; - public static final String USER_NAME = "username"; - public static final String PASSWORD = "password"; - public static final String ACTOR = "actor"; - public static final String ACCESS_TOKEN = "token"; - public static final String FULL_NAME = "fullName"; - public static final String EMAIL = "email"; - public static final String TITLE = "title"; - public static final String INVITE_TOKEN = "inviteToken"; - public static final String RESET_TOKEN = "resetToken"; - - /** - * Determines whether the inbound request should be forward to downstream Metadata Service. 
Today, this simply - * checks for the presence of an "Authorization" header or the presence of a valid session cookie issued - * by the frontend. - * - * Note that this method DOES NOT actually verify the authentication token of an inbound request. That will - * be handled by the downstream Metadata Service. Until then, the request should be treated as UNAUTHENTICATED. - * - * Returns true if the request is eligible to be forwarded to GMS, false otherwise. - */ - public static boolean isEligibleForForwarding(Http.Request req) { - return hasValidSessionCookie(req) || hasAuthHeader(req); + /** + * The config path that determines whether Metadata Service Authentication is enabled. + * + *

When enabled, the frontend server will proxy requests to the Metadata Service without + * requiring them to have a valid frontend-issued Session Cookie. This effectively means + * delegating the act of authentication to the Metadata Service. It is critical that if Metadata + * Service authentication is enabled at the frontend service layer, it is also enabled in the + * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself. + * + *

When disabled, the frontend server will require that all requests have a valid Session + * Cookie associated with them. Otherwise, requests will be denied with an Unauthorized error. + */ + public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = + "metadataService.auth.enabled"; + + /** The attribute inside session cookie representing a GMS-issued access token */ + public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token"; + + /** + * An ID used to identify system callers that are internal to DataHub. Provided via configuration. + */ + public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId"; + + /** + * An Secret used to authenticate system callers that are internal to DataHub. Provided via + * configuration. + */ + public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret"; + + /** Cookie name for redirect url that is manually separated from the session to reduce size */ + public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL"; + + public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub"); + + public static final String LOGIN_ROUTE = "/login"; + public static final String USER_NAME = "username"; + public static final String PASSWORD = "password"; + public static final String ACTOR = "actor"; + public static final String ACCESS_TOKEN = "token"; + public static final String FULL_NAME = "fullName"; + public static final String EMAIL = "email"; + public static final String TITLE = "title"; + public static final String INVITE_TOKEN = "inviteToken"; + public static final String RESET_TOKEN = "resetToken"; + + /** + * Determines whether the inbound request should be forward to downstream Metadata Service. Today, + * this simply checks for the presence of an "Authorization" header or the presence of a valid + * session cookie issued by the frontend. + * + *

Note that this method DOES NOT actually verify the authentication token of an inbound + * request. That will be handled by the downstream Metadata Service. Until then, the request + * should be treated as UNAUTHENTICATED. + * + *

Returns true if the request is eligible to be forwarded to GMS, false otherwise. + */ + public static boolean isEligibleForForwarding(Http.Request req) { + return hasValidSessionCookie(req) || hasAuthHeader(req); + } + + /** + * Returns true if a request has a valid session cookie issued by the frontend server. Note that + * this DOES NOT verify whether the token within the session cookie will be accepted by the + * downstream GMS service. + * + *

Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, + * as well as their agreement to determine authentication status. + */ + public static boolean hasValidSessionCookie(final Http.Request req) { + Map sessionCookie = req.session().data(); + return sessionCookie.containsKey(ACCESS_TOKEN) + && sessionCookie.containsKey(ACTOR) + && req.getCookie(ACTOR).isPresent() + && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); + } + + /** Returns true if a request includes the Authorization header, false otherwise */ + public static boolean hasAuthHeader(final Http.Request req) { + return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); + } + + /** + * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. + * + * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub" + * @param ttlInHours the number of hours until the actor cookie expires after being set + */ + public static Http.Cookie createActorCookie( + @Nonnull final String actorUrn, + @Nonnull final Integer ttlInHours, + @Nonnull final String sameSite, + final boolean isSecure) { + return Http.Cookie.builder(ACTOR, actorUrn) + .withHttpOnly(false) + .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) + .withSameSite(convertSameSiteValue(sameSite)) + .withSecure(isSecure) + .build(); + } + + public static Map createSessionMap( + final String userUrnStr, final String accessToken) { + final Map sessionAttributes = new HashMap<>(); + sessionAttributes.put(ACTOR, userUrnStr); + sessionAttributes.put(ACCESS_TOKEN, accessToken); + return sessionAttributes; + } + + private AuthUtils() {} + + private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { + try { + return Http.Cookie.SameSite.valueOf(sameSiteValue); + } catch (IllegalArgumentException e) { + log.warn( + String.format( + "Invalid AUTH_COOKIE_SAME_SITE value: %s. 
Using LAX instead.", sameSiteValue), + e); + return Http.Cookie.SameSite.LAX; } - - /** - * Returns true if a request has a valid session cookie issued by the frontend server. - * Note that this DOES NOT verify whether the token within the session cookie will be accepted - * by the downstream GMS service. - * - * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not, - * as well as their agreement to determine authentication status. - */ - public static boolean hasValidSessionCookie(final Http.Request req) { - Map sessionCookie = req.session().data(); - return sessionCookie.containsKey(ACCESS_TOKEN) - && sessionCookie.containsKey(ACTOR) - && req.getCookie(ACTOR).isPresent() - && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value()); - } - - /** - * Returns true if a request includes the Authorization header, false otherwise - */ - public static boolean hasAuthHeader(final Http.Request req) { - return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION); - } - - /** - * Creates a client authentication cookie (actor cookie) with a specified TTL in hours. - * - * @param actorUrn the urn of the authenticated actor, e.g. 
"urn:li:corpuser:datahub" - * @param ttlInHours the number of hours until the actor cookie expires after being set - */ - public static Http.Cookie createActorCookie( - @Nonnull final String actorUrn, - @Nonnull final Integer ttlInHours, - @Nonnull final String sameSite, - final boolean isSecure - ) { - return Http.Cookie.builder(ACTOR, actorUrn) - .withHttpOnly(false) - .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS)) - .withSameSite(convertSameSiteValue(sameSite)) - .withSecure(isSecure) - .build(); - } - - public static Map createSessionMap(final String userUrnStr, final String accessToken) { - final Map sessionAttributes = new HashMap<>(); - sessionAttributes.put(ACTOR, userUrnStr); - sessionAttributes.put(ACCESS_TOKEN, accessToken); - return sessionAttributes; - } - - private AuthUtils() { } - - private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) { - try { - return Http.Cookie.SameSite.valueOf(sameSiteValue); - } catch (IllegalArgumentException e) { - log.warn(String.format("Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), e); - return Http.Cookie.SameSite.LAX; - } - } - + } } diff --git a/datahub-frontend/app/auth/Authenticator.java b/datahub-frontend/app/auth/Authenticator.java index ae847b318dce28..8536fc7e016956 100644 --- a/datahub-frontend/app/auth/Authenticator.java +++ b/datahub-frontend/app/auth/Authenticator.java @@ -1,48 +1,49 @@ package auth; +import static auth.AuthUtils.*; + import com.typesafe.config.Config; import java.util.Optional; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; import javax.inject.Inject; import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; - -import static auth.AuthUtils.*; - - /** * Implementation of base Play Authentication used to determine if a request to a route should be * authenticated. 
*/ public class Authenticator extends Security.Authenticator { - private final boolean metadataServiceAuthEnabled; + private final boolean metadataServiceAuthEnabled; - @Inject - public Authenticator(@Nonnull Config config) { - this.metadataServiceAuthEnabled = config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) + @Inject + public Authenticator(@Nonnull Config config) { + this.metadataServiceAuthEnabled = + config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH) && config.getBoolean(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH); + } + + @Override + public Optional getUsername(@Nonnull Http.Request req) { + if (this.metadataServiceAuthEnabled) { + // If Metadata Service auth is enabled, we only want to verify presence of the + // "Authorization" header OR the presence of a frontend generated session cookie. + // At this time, the actor is still considered to be unauthenicated. + return Optional.ofNullable( + AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); + } else { + // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. + return Optional.ofNullable( + AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null); } + } - @Override - public Optional getUsername(@Nonnull Http.Request req) { - if (this.metadataServiceAuthEnabled) { - // If Metadata Service auth is enabled, we only want to verify presence of the - // "Authorization" header OR the presence of a frontend generated session cookie. - // At this time, the actor is still considered to be unauthenicated. - return Optional.ofNullable(AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null); - } else { - // If Metadata Service auth is not enabled, verify the presence of a valid session cookie. - return Optional.ofNullable(AuthUtils.hasValidSessionCookie(req) ? 
req.session().data().get(ACTOR) : null); - } - } - - @Override - @Nonnull - public Result onUnauthorized(@Nullable Http.Request req) { - return unauthorized(); - } + @Override + @Nonnull + public Result onUnauthorized(@Nullable Http.Request req) { + return unauthorized(); + } } diff --git a/datahub-frontend/app/auth/ConfigUtil.java b/datahub-frontend/app/auth/ConfigUtil.java index e0999ee00be386..9fbed91ce6a10c 100644 --- a/datahub-frontend/app/auth/ConfigUtil.java +++ b/datahub-frontend/app/auth/ConfigUtil.java @@ -3,20 +3,20 @@ import com.typesafe.config.Config; import java.util.Optional; - public class ConfigUtil { - private ConfigUtil() { - } + private ConfigUtil() {} public static String getRequired(final Config configs, final String path) { if (!configs.hasPath(path)) { - throw new IllegalArgumentException(String.format("Missing required config with path %s", path)); + throw new IllegalArgumentException( + String.format("Missing required config with path %s", path)); } return configs.getString(path); } - public static String getOptional(final Config configs, final String path, final String defaultVal) { + public static String getOptional( + final Config configs, final String path, final String defaultVal) { if (!configs.hasPath(path)) { return defaultVal; } diff --git a/datahub-frontend/app/auth/CookieConfigs.java b/datahub-frontend/app/auth/CookieConfigs.java index b6da9b7a1833c4..63b2ce61aaf9bb 100644 --- a/datahub-frontend/app/auth/CookieConfigs.java +++ b/datahub-frontend/app/auth/CookieConfigs.java @@ -1,6 +1,5 @@ package auth; - import com.typesafe.config.Config; public class CookieConfigs { @@ -16,12 +15,18 @@ public class CookieConfigs { private final boolean _authCookieSecure; public CookieConfigs(final Config configs) { - _ttlInHours = configs.hasPath(SESSION_TTL_CONFIG_PATH) ? configs.getInt(SESSION_TTL_CONFIG_PATH) - : DEFAULT_SESSION_TTL_HOURS; - _authCookieSameSite = configs.hasPath(AUTH_COOKIE_SAME_SITE) ? 
configs.getString(AUTH_COOKIE_SAME_SITE) - : DEFAULT_AUTH_COOKIE_SAME_SITE; - _authCookieSecure = configs.hasPath(AUTH_COOKIE_SECURE) ? configs.getBoolean(AUTH_COOKIE_SECURE) - : DEFAULT_AUTH_COOKIE_SECURE; + _ttlInHours = + configs.hasPath(SESSION_TTL_CONFIG_PATH) + ? configs.getInt(SESSION_TTL_CONFIG_PATH) + : DEFAULT_SESSION_TTL_HOURS; + _authCookieSameSite = + configs.hasPath(AUTH_COOKIE_SAME_SITE) + ? configs.getString(AUTH_COOKIE_SAME_SITE) + : DEFAULT_AUTH_COOKIE_SAME_SITE; + _authCookieSecure = + configs.hasPath(AUTH_COOKIE_SECURE) + ? configs.getBoolean(AUTH_COOKIE_SECURE) + : DEFAULT_AUTH_COOKIE_SECURE; } public int getTtlInHours() { diff --git a/datahub-frontend/app/auth/JAASConfigs.java b/datahub-frontend/app/auth/JAASConfigs.java index f39c20aceb6f9b..529bf98e1fdcf2 100644 --- a/datahub-frontend/app/auth/JAASConfigs.java +++ b/datahub-frontend/app/auth/JAASConfigs.java @@ -6,17 +6,18 @@ */ public class JAASConfigs { - public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled"; + public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled"; - private Boolean _isEnabled = true; + private Boolean _isEnabled = true; - public JAASConfigs(final com.typesafe.config.Config configs) { - if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) { - _isEnabled = false; - } + public JAASConfigs(final com.typesafe.config.Config configs) { + if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) + && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) { + _isEnabled = false; } + } - public boolean isJAASEnabled() { - return _isEnabled; - } + public boolean isJAASEnabled() { + return _isEnabled; + } } diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java index 3114da92d7d79a..772c2c8f92f28c 100644 --- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java +++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java @@ -1,23 
+1,27 @@ package auth; -/** - * Currently, this config enables or disable native user authentication. - */ +/** Currently, this config enables or disable native user authentication. */ public class NativeAuthenticationConfigs { public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled"; - public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = "auth.native.signUp.enforceValidEmail"; + public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = + "auth.native.signUp.enforceValidEmail"; private Boolean _isEnabled = true; private Boolean _isEnforceValidEmailEnabled = true; public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) { if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) { - _isEnabled = Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()); + _isEnabled = + Boolean.parseBoolean( + configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString()); } if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) { _isEnforceValidEmailEnabled = - Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH).toString()); + Boolean.parseBoolean( + configs + .getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH) + .toString()); } } diff --git a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java index a6dbd69a938893..223ac669bd6eae 100644 --- a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java +++ b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java @@ -7,16 +7,15 @@ import play.api.mvc.FlashCookieBaker; import play.api.mvc.SessionCookieBaker; - public class CustomCookiesModule extends AbstractModule { @Override public void configure() { bind(CookieSigner.class).toProvider(CookieSignerProvider.class); - // We override the session 
cookie baker to not use a fallback, this prevents using an old URL Encoded cookie + // We override the session cookie baker to not use a fallback, this prevents using an old URL + // Encoded cookie bind(SessionCookieBaker.class).to(CustomSessionCookieBaker.class); // We don't care about flash cookies, we don't use them bind(FlashCookieBaker.class).to(DefaultFlashCookieBaker.class); } - } diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java index 062054173bddb7..1f8455e773ffb1 100644 --- a/datahub-frontend/app/auth/sso/SsoConfigs.java +++ b/datahub-frontend/app/auth/sso/SsoConfigs.java @@ -2,24 +2,19 @@ import static auth.ConfigUtil.*; - -/** - * Class responsible for extracting and validating top-level SSO related configurations. - */ +/** Class responsible for extracting and validating top-level SSO related configurations. */ public class SsoConfigs { - /** - * Required configs - */ + /** Required configs */ private static final String AUTH_BASE_URL_CONFIG_PATH = "auth.baseUrl"; + private static final String AUTH_BASE_CALLBACK_PATH_CONFIG_PATH = "auth.baseCallbackPath"; private static final String AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH = "auth.successRedirectPath"; public static final String OIDC_ENABLED_CONFIG_PATH = "auth.oidc.enabled"; - /** - * Default values - */ + /** Default values */ private static final String DEFAULT_BASE_CALLBACK_PATH = "/callback"; + private static final String DEFAULT_SUCCESS_REDIRECT_PATH = "/"; private final String _authBaseUrl; @@ -29,17 +24,14 @@ public class SsoConfigs { public SsoConfigs(final com.typesafe.config.Config configs) { _authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH); - _authBaseCallbackPath = getOptional( - configs, - AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, - DEFAULT_BASE_CALLBACK_PATH); - _authSuccessRedirectPath = getOptional( - configs, - AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, - DEFAULT_SUCCESS_REDIRECT_PATH); - _oidcEnabled = 
configs.hasPath(OIDC_ENABLED_CONFIG_PATH) - && Boolean.TRUE.equals( - Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); + _authBaseCallbackPath = + getOptional(configs, AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, DEFAULT_BASE_CALLBACK_PATH); + _authSuccessRedirectPath = + getOptional(configs, AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, DEFAULT_SUCCESS_REDIRECT_PATH); + _oidcEnabled = + configs.hasPath(OIDC_ENABLED_CONFIG_PATH) + && Boolean.TRUE.equals( + Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH))); } public String getAuthBaseUrl() { diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java index 739ce3f1ba4508..bf33f4148a5531 100644 --- a/datahub-frontend/app/auth/sso/SsoManager.java +++ b/datahub-frontend/app/auth/sso/SsoManager.java @@ -2,19 +2,16 @@ import javax.annotation.Nonnull; - -/** - * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. - */ +/** Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. */ public class SsoManager { private SsoProvider _provider; // Only one active provider at a time. - public SsoManager() { } + public SsoManager() {} /** - * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been - * provided to the manager. + * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the + * manager. * * @return true if SSO logic is enabled, false otherwise. */ @@ -34,8 +31,8 @@ public void setSsoProvider(@Nonnull final SsoProvider provider) { /** * Gets the active {@link SsoProvider} instance. * - * @return the {@SsoProvider} that should be used during authentication and on - * IdP callback, or null if SSO is not enabled. + * @return the {@SsoProvider} that should be used during authentication and on IdP callback, or + * null if SSO is not enabled. 
*/ public SsoProvider getSsoProvider() { return _provider; diff --git a/datahub-frontend/app/auth/sso/SsoProvider.java b/datahub-frontend/app/auth/sso/SsoProvider.java index f7454d599ba995..a0947b52b92ae6 100644 --- a/datahub-frontend/app/auth/sso/SsoProvider.java +++ b/datahub-frontend/app/auth/sso/SsoProvider.java @@ -3,15 +3,10 @@ import org.pac4j.core.client.Client; import org.pac4j.core.credentials.Credentials; -/** - * A thin interface over a Pac4j {@link Client} object and its - * associated configurations. - */ +/** A thin interface over a Pac4j {@link Client} object and its associated configurations. */ public interface SsoProvider { - /** - * The protocol used for SSO. - */ + /** The protocol used for SSO. */ enum SsoProtocol { OIDC("oidc"); // SAML -- not yet supported. @@ -28,19 +23,12 @@ public String getCommonName() { } } - /** - * Returns the configs required by the provider. - */ + /** Returns the configs required by the provider. */ C configs(); - /** - * Returns the SSO protocol associated with the provider instance. - */ + /** Returns the SSO protocol associated with the provider instance. */ SsoProtocol protocol(); - /** - * Retrieves an initialized Pac4j {@link Client}. - */ + /** Retrieves an initialized Pac4j {@link Client}. 
*/ Client client(); - } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java index baca144610ec4c..fa676d2d16c904 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java @@ -1,9 +1,9 @@ package auth.sso.oidc; +import com.nimbusds.jwt.JWT; +import com.nimbusds.jwt.JWTParser; import java.util.Map.Entry; import java.util.Optional; - -import com.nimbusds.jwt.JWTParser; import org.pac4j.core.authorization.generator.AuthorizationGenerator; import org.pac4j.core.context.WebContext; import org.pac4j.core.profile.AttributeLocation; @@ -14,44 +14,43 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import com.nimbusds.jwt.JWT; - public class OidcAuthorizationGenerator implements AuthorizationGenerator { - private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class); - - private final ProfileDefinition profileDef; + private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class); - private final OidcConfigs oidcConfigs; + private final ProfileDefinition profileDef; - public OidcAuthorizationGenerator(final ProfileDefinition profileDef, final OidcConfigs oidcConfigs) { - this.profileDef = profileDef; - this.oidcConfigs = oidcConfigs; - } + private final OidcConfigs oidcConfigs; - @Override - public Optional generate(WebContext context, UserProfile profile) { - if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { - try { - final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); - - CommonProfile commonProfile = new CommonProfile(); - - for (final Entry entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { - final String claimName = entry.getKey(); - - if (profile.getAttribute(claimName) == null) { - profileDef.convertAndAdd(commonProfile, 
AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); - } - } - - return Optional.of(commonProfile); - } catch (Exception e) { - logger.warn("Cannot parse access token claims", e); - } + public OidcAuthorizationGenerator( + final ProfileDefinition profileDef, final OidcConfigs oidcConfigs) { + this.profileDef = profileDef; + this.oidcConfigs = oidcConfigs; + } + + @Override + public Optional generate(WebContext context, UserProfile profile) { + if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) { + try { + final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue()); + + CommonProfile commonProfile = new CommonProfile(); + + for (final Entry entry : jwt.getJWTClaimsSet().getClaims().entrySet()) { + final String claimName = entry.getKey(); + + if (profile.getAttribute(claimName) == null) { + profileDef.convertAndAdd( + commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue()); + } } - - return Optional.ofNullable(profile); + + return Optional.of(commonProfile); + } catch (Exception e) { + logger.warn("Cannot parse access token claims", e); + } } - + + return Optional.ofNullable(profile); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java index 7164710f4e0ded..fa562f54312eca 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java @@ -1,6 +1,13 @@ package auth.sso.oidc; +import static auth.AuthUtils.*; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; +import static org.pac4j.play.store.PlayCookieSessionStore.*; +import static play.mvc.Results.internalServerError; + import auth.CookieConfigs; +import auth.sso.SsoManager; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import 
com.linkedin.common.AuditStamp; @@ -59,23 +66,16 @@ import org.pac4j.core.util.Pac4jConstants; import org.pac4j.play.PlayWebContext; import play.mvc.Result; -import auth.sso.SsoManager; - -import static auth.AuthUtils.*; -import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME; -import static org.pac4j.play.store.PlayCookieSessionStore.*; -import static play.mvc.Results.internalServerError; - /** - * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects back to D - * DataHub after an authentication attempt. + * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects + * back to D DataHub after an authentication attempt. * - * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract - * basic information about the user including their name, email, groups, & more. If just-in-time provisioning - * is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for the user, along with any Groups - * ({@link CorpGroupSnapshot}) that can be extracted, only doing so if the user does not already exist. + *

On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract + * basic information about the user including their name, email, groups, & more. If just-in-time + * provisioning is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for + * the user, along with any Groups ({@link CorpGroupSnapshot}) that can be extracted, only doing so + * if the user does not already exist. */ @Slf4j public class OidcCallbackLogic extends DefaultCallbackLogic { @@ -86,9 +86,12 @@ public class OidcCallbackLogic extends DefaultCallbackLogic httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { setContextRedirectUrl(context); final Result result = - super.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, + super.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, defaultClient); // Handle OIDC authentication errors. 
@@ -119,14 +135,25 @@ public Result perform(PlayWebContext context, Config config, @SuppressWarnings("unchecked") private void setContextRedirectUrl(PlayWebContext context) { - Optional redirectUrl = context.getRequestCookies().stream() - .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())).findFirst(); + Optional redirectUrl = + context.getRequestCookies().stream() + .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())) + .findFirst(); redirectUrl.ifPresent( - cookie -> context.getSessionStore().set(context, Pac4jConstants.REQUESTED_URL, - JAVA_SER_HELPER.deserializeFromBytes(uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); + cookie -> + context + .getSessionStore() + .set( + context, + Pac4jConstants.REQUESTED_URL, + JAVA_SER_HELPER.deserializeFromBytes( + uncompressBytes(Base64.getDecoder().decode(cookie.getValue()))))); } - private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result result, final PlayWebContext context, + private Result handleOidcCallback( + final OidcConfigs oidcConfigs, + final Result result, + final PlayWebContext context, final ProfileManager profileManager) { log.debug("Beginning OIDC Callback Handling..."); @@ -134,14 +161,17 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re if (profileManager.isAuthenticated()) { // If authenticated, the user should have a profile. final CommonProfile profile = (CommonProfile) profileManager.get(true).get(); - log.debug(String.format("Found authenticated user with profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Found authenticated user with profile %s", profile.getAttributes().toString())); // Extract the User name required to log into DataHub. 
final String userName = extractUserNameOrThrow(oidcConfigs, profile); final CorpuserUrn corpUserUrn = new CorpuserUrn(userName); try { - // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does not exist. + // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does + // not exist. if (oidcConfigs.isJitProvisioningEnabled()) { log.debug("Just-in-time provisioning is enabled. Beginning provisioning process..."); CorpUserSnapshot extractedUser = extractUser(corpUserUrn, profile); @@ -150,7 +180,8 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re // Extract groups & provision them. List extractedGroups = extractGroups(profile); tryProvisionGroups(extractedGroups); - // Add users to groups on DataHub. Note that this clears existing group membership for a user if it already exists. + // Add users to groups on DataHub. Note that this clears existing group membership for a + // user if it already exists. updateGroupMembership(corpUserUrn, createGroupMembership(extractedGroups)); } } else if (oidcConfigs.isPreProvisioningRequired()) { @@ -160,55 +191,69 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re } // Update user status to active on login. // If we want to prevent certain users from logging in, here's where we'll want to do it. - setUserStatus(corpUserUrn, new CorpUserStatus().setStatus(Constants.CORP_USER_STATUS_ACTIVE) - .setLastModified(new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) - .setTime(System.currentTimeMillis()))); + setUserStatus( + corpUserUrn, + new CorpUserStatus() + .setStatus(Constants.CORP_USER_STATUS_ACTIVE) + .setLastModified( + new AuditStamp() + .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()))); } catch (Exception e) { log.error("Failed to perform post authentication steps. 
Redirecting to error page.", e); return internalServerError( - String.format("Failed to perform post authentication steps. Error message: %s", e.getMessage())); + String.format( + "Failed to perform post authentication steps. Error message: %s", e.getMessage())); } // Successfully logged in - Generate GMS login token final String accessToken = _authClient.generateSessionTokenForUser(corpUserUrn.getId()); return result - .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) - .withCookies( - createActorCookie( - corpUserUrn.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + .withSession(createSessionMap(corpUserUrn.toString(), accessToken)) + .withCookies( + createActorCookie( + corpUserUrn.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); } return internalServerError( "Failed to authenticate current user. Cannot find valid identity provider profile in session."); } - private String extractUserNameOrThrow(final OidcConfigs oidcConfigs, final CommonProfile profile) { + private String extractUserNameOrThrow( + final OidcConfigs oidcConfigs, final CommonProfile profile) { // Ensure that the attribute exists (was returned by IdP) if (!profile.containsAttribute(oidcConfigs.getUserNameClaim())) { - throw new RuntimeException(String.format( - "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s", - oidcConfigs.getUserNameClaim(), oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString())); + throw new RuntimeException( + String.format( + "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. 
Attribute: '%s', Regex: '%s', Profile: %s", + oidcConfigs.getUserNameClaim(), + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString())); } final String userNameClaim = (String) profile.getAttribute(oidcConfigs.getUserNameClaim()); - final Optional mappedUserName = extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); - - return mappedUserName.orElseThrow(() -> new RuntimeException( - String.format("Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", - userNameClaim, oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString()))); + final Optional mappedUserName = + extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim); + + return mappedUserName.orElseThrow( + () -> + new RuntimeException( + String.format( + "Failed to extract DataHub username from username claim %s using regex %s. Profile: %s", + userNameClaim, + oidcConfigs.getUserNameClaimRegex(), + profile.getAttributes().toString()))); } - /** - * Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. - */ + /** Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. 
*/ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { - log.debug(String.format("Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract user from OIDC profile %s", profile.getAttributes().toString())); // Extracts these based on the default set of OIDC claims, described here: // https://developer.okta.com/blog/2017/07/25/oidc-primer-part-1 @@ -217,7 +262,9 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { String email = profile.getEmail(); URI picture = profile.getPictureUrl(); String displayName = profile.getDisplayName(); - String fullName = (String) profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. + String fullName = + (String) + profile.getAttribute("name"); // Name claim is sometimes provided, including by Google. if (fullName == null && firstName != null && lastName != null) { fullName = String.format("%s %s", firstName, lastName); } @@ -231,7 +278,8 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { userInfo.setFullName(fullName, SetMode.IGNORE_NULL); userInfo.setEmail(email, SetMode.IGNORE_NULL); // If there is a display name, use it. Otherwise fall back to full name. - userInfo.setDisplayName(displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); + userInfo.setDisplayName( + displayName == null ? 
userInfo.getFullName() : displayName, SetMode.IGNORE_NULL); final CorpUserEditableInfo editableInfo = new CorpUserEditableInfo(); try { @@ -254,15 +302,18 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) { private List extractGroups(CommonProfile profile) { - log.debug(String.format("Attempting to extract groups from OIDC profile %s", profile.getAttributes().toString())); + log.debug( + String.format( + "Attempting to extract groups from OIDC profile %s", + profile.getAttributes().toString())); final OidcConfigs configs = (OidcConfigs) _ssoManager.getSsoProvider().configs(); - // First, attempt to extract a list of groups from the profile, using the group name attribute config. + // First, attempt to extract a list of groups from the profile, using the group name attribute + // config. final List extractedGroups = new ArrayList<>(); final List groupsClaimNames = - new ArrayList(Arrays.asList(configs.getGroupsClaimName().split(","))).stream() - .map(String::trim) - .collect(Collectors.toList()); + new ArrayList(Arrays.asList(configs.getGroupsClaimName().split(","))) + .stream().map(String::trim).collect(Collectors.toList()); for (final String groupsClaimName : groupsClaimNames) { @@ -273,14 +324,16 @@ private List extractGroups(CommonProfile profile) { final Object groupAttribute = profile.getAttribute(groupsClaimName); if (groupAttribute instanceof Collection) { // List of group names - groupNames = (Collection) profile.getAttribute(groupsClaimName, Collection.class); + groupNames = + (Collection) profile.getAttribute(groupsClaimName, Collection.class); } else if (groupAttribute instanceof String) { // Single group name groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class)); } else { log.error( - String.format("Fail to parse OIDC group claim with name %s. Unknown type %s provided.", groupsClaimName, - groupAttribute.getClass())); + String.format( + "Fail to parse OIDC group claim with name %s. 
Unknown type %s provided.", + groupsClaimName, groupAttribute.getClass())); // Skip over group attribute. Do not throw. groupNames = Collections.emptyList(); } @@ -297,7 +350,8 @@ private List extractGroups(CommonProfile profile) { corpGroupInfo.setDisplayName(groupName); // To deal with the possibility of spaces, we url encode the URN group name. - final String urlEncodedGroupName = URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); + final String urlEncodedGroupName = + URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString()); final CorpGroupUrn groupUrn = new CorpGroupUrn(urlEncodedGroupName); final CorpGroupSnapshot corpGroupSnapshot = new CorpGroupSnapshot(); corpGroupSnapshot.setUrn(groupUrn); @@ -306,18 +360,23 @@ private List extractGroups(CommonProfile profile) { corpGroupSnapshot.setAspects(aspects); groupSnapshots.add(corpGroupSnapshot); } catch (UnsupportedEncodingException ex) { - log.error(String.format("Failed to URL encoded extracted group name %s. Skipping", groupName)); + log.error( + String.format( + "Failed to URL encoded extracted group name %s. 
Skipping", groupName)); } } if (groupSnapshots.isEmpty()) { - log.warn(String.format("Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); + log.warn( + String.format( + "Failed to extract groups: No OIDC claim with name %s found", groupsClaimName)); } else { extractedGroups.addAll(groupSnapshots); } } catch (Exception e) { - log.error(String.format( - "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", - groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); + log.error( + String.format( + "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s", + groupsClaimName, profile.getAttribute(groupsClaimName).getClass())); } } } @@ -327,7 +386,8 @@ private List extractGroups(CommonProfile profile) { private GroupMembership createGroupMembership(final List extractedGroups) { final GroupMembership groupMembershipAspect = new GroupMembership(); groupMembershipAspect.setGroups( - new UrnArray(extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + new UrnArray( + extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); return groupMembershipAspect; } @@ -345,30 +405,39 @@ private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) { // If we find more than the key aspect, then the entity "exists". if (existingCorpUserSnapshot.getAspects().size() <= 1) { log.debug( - String.format("Extracted user that does not yet exist %s. Provisioning...", corpUserSnapshot.getUrn())); + String.format( + "Extracted user that does not yet exist %s. Provisioning...", + corpUserSnapshot.getUrn())); // 2. The user does not exist. Provision them. 
final Entity newEntity = new Entity(); newEntity.setValue(Snapshot.create(corpUserSnapshot)); _entityClient.update(newEntity, _systemAuthentication); log.debug(String.format("Successfully provisioned user %s", corpUserSnapshot.getUrn())); } - log.debug(String.format("User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); + log.debug( + String.format( + "User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn())); // Otherwise, the user exists. Skip provisioning. } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. - throw new RuntimeException(String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); + throw new RuntimeException( + String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e); } } private void tryProvisionGroups(List corpGroups) { - log.debug(String.format("Attempting to provision groups with urns %s", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); + log.debug( + String.format( + "Attempting to provision groups with urns %s", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList()))); // 1. Check if this user already exists. try { - final Set urnsToFetch = corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); - final Map existingGroups = _entityClient.batchGet(urnsToFetch, _systemAuthentication); + final Set urnsToFetch = + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet()); + final Map existingGroups = + _entityClient.batchGet(urnsToFetch, _systemAuthentication); log.debug(String.format("Fetched GMS groups with urns %s", existingGroups.keySet())); @@ -381,15 +450,21 @@ private void tryProvisionGroups(List corpGroups) { // If more than the key aspect exists, then the group already "exists". 
if (corpGroupSnapshot.getAspects().size() <= 1) { - log.debug(String.format("Extracted group that does not yet exist %s. Provisioning...", - corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + corpGroupSnapshot.getUrn())); groupsToCreate.add(extractedGroup); } - log.debug(String.format("Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); + log.debug( + String.format( + "Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn())); } else { // Should not occur until we stop returning default Key aspects for unrecognized entities. log.debug( - String.format("Extracted group that does not yet exist %s. Provisioning...", extractedGroup.getUrn())); + String.format( + "Extracted group that does not yet exist %s. Provisioning...", + extractedGroup.getUrn())); groupsToCreate.add(extractedGroup); } } @@ -400,15 +475,20 @@ private void tryProvisionGroups(List corpGroups) { log.debug(String.format("Provisioning groups with urns %s", groupsToCreateUrns)); // Now batch create all entities identified to create. - _entityClient.batchUpdate(groupsToCreate.stream() - .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) - .collect(Collectors.toSet()), _systemAuthentication); + _entityClient.batchUpdate( + groupsToCreate.stream() + .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot))) + .collect(Collectors.toSet()), + _systemAuthentication); log.debug(String.format("Successfully provisioned groups with urns %s", groupsToCreateUrns)); } catch (RemoteInvocationException e) { // Failing provisioning is something worth throwing about. 
- throw new RuntimeException(String.format("Failed to provision groups with urns %s.", - corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), e); + throw new RuntimeException( + String.format( + "Failed to provision groups with urns %s.", + corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), + e); } } @@ -423,12 +503,14 @@ private void updateGroupMembership(Urn urn, GroupMembership groupMembership) { try { _entityClient.ingestProposal(proposal, _systemAuthentication); } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to update group membership for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to update group membership for user with urn %s", urn), e); } } private void verifyPreProvisionedUser(CorpuserUrn urn) { - // Validate that the user exists in the system (there is more than just a key aspect for them, as of today). + // Validate that the user exists in the system (there is more than just a key aspect for them, + // as of today). try { final Entity corpUser = _entityClient.get(urn, _systemAuthentication); @@ -436,9 +518,14 @@ private void verifyPreProvisionedUser(CorpuserUrn urn) { // If we find more than the key aspect, then the entity "exists". if (corpUser.getValue().getCorpUserSnapshot().getAspects().size() <= 1) { - log.debug(String.format("Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); - throw new RuntimeException(String.format("User with urn %s has not yet been provisioned in DataHub. " - + "Please contact your DataHub admin to provision an account.", urn)); + log.debug( + String.format( + "Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn)); + throw new RuntimeException( + String.format( + "User with urn %s has not yet been provisioned in DataHub. 
" + + "Please contact your DataHub admin to provision an account.", + urn)); } // Otherwise, the user exists. } catch (RemoteInvocationException e) { diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java index eb037db2ef9c01..6877ca187da973 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java @@ -1,104 +1,122 @@ package auth.sso.oidc; +import static auth.ConfigUtil.*; + import auth.sso.SsoConfigs; import java.util.Optional; import lombok.Getter; -import static auth.ConfigUtil.*; - - -/** - * Class responsible for extracting and validating OIDC related configurations. - */ +/** Class responsible for extracting and validating OIDC related configurations. */ @Getter public class OidcConfigs extends SsoConfigs { - /** - * Required configs - */ - public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; - public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; - public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + /** Required configs */ + public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId"; + + public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret"; + public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri"; + + /** Optional configs */ + public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; + + public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; + public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; + public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; + public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = + "auth.oidc.clientAuthenticationMethod"; + public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = + 
"auth.oidc.jitProvisioningEnabled"; + public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = + "auth.oidc.preProvisioningRequired"; + public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; + public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = + "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. + public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; + public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; + public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; + public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; + public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; + public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = + "auth.oidc.extractJwtAccessTokenClaims"; + public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; - /** - * Optional configs - */ - public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim"; - public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex"; - public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope"; - public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName"; - public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = "auth.oidc.clientAuthenticationMethod"; - public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = "auth.oidc.jitProvisioningEnabled"; - public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = "auth.oidc.preProvisioningRequired"; - public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled"; - public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = "auth.oidc.groupsClaim"; // Claim expected to be an array of group names. 
- public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType"; - public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode"; - public static final String OIDC_USE_NONCE = "auth.oidc.useNonce"; - public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource"; - public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout"; - public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims"; - public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm"; + /** Default values */ + private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - /** - * Default values - */ - private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email"; - private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; - private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; // Often "group" must be included for groups. - private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; - private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; - private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; - private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; - private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; // False since extraction of groups can overwrite existing group membership. - private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; - private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; + private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)"; + private static final String DEFAULT_OIDC_SCOPE = + "openid profile email"; // Often "group" must be included for groups. 
+ private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc"; + private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic"; + private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true"; + private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false"; + private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = + "false"; // False since extraction of groups can overwrite existing group membership. + private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups"; + private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000"; - private String clientId; - private String clientSecret; - private String discoveryUri; - private String userNameClaim; - private String userNameClaimRegex; - private String scope; - private String clientName; - private String clientAuthenticationMethod; - private boolean jitProvisioningEnabled; - private boolean preProvisioningRequired; - private boolean extractGroupsEnabled; - private String groupsClaimName; - private Optional responseType; - private Optional responseMode; - private Optional useNonce; - private Optional customParamResource; - private String readTimeout; - private Optional extractJwtAccessTokenClaims; - private Optional preferredJwsAlgorithm; + private String clientId; + private String clientSecret; + private String discoveryUri; + private String userNameClaim; + private String userNameClaimRegex; + private String scope; + private String clientName; + private String clientAuthenticationMethod; + private boolean jitProvisioningEnabled; + private boolean preProvisioningRequired; + private boolean extractGroupsEnabled; + private String groupsClaimName; + private Optional responseType; + private Optional responseMode; + private Optional useNonce; + private Optional customParamResource; + private String readTimeout; + private Optional extractJwtAccessTokenClaims; + private Optional preferredJwsAlgorithm; - public OidcConfigs(final 
com.typesafe.config.Config configs) { - super(configs); - clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); - clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); - discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); - userNameClaim = getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); - userNameClaimRegex = - getOptional(configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); - scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); - clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); - clientAuthenticationMethod = getOptional(configs, OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, + public OidcConfigs(final com.typesafe.config.Config configs) { + super(configs); + clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH); + clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH); + discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH); + userNameClaim = + getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM); + userNameClaimRegex = + getOptional( + configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX); + scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE); + clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME); + clientAuthenticationMethod = + getOptional( + configs, + OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH, DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD); - jitProvisioningEnabled = Boolean.parseBoolean( - getOptional(configs, OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); - preProvisioningRequired = Boolean.parseBoolean( - getOptional(configs, OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); - extractGroupsEnabled = Boolean.parseBoolean( + jitProvisioningEnabled = + Boolean.parseBoolean( 
+ getOptional( + configs, + OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, + DEFAULT_OIDC_JIT_PROVISIONING_ENABLED)); + preProvisioningRequired = + Boolean.parseBoolean( + getOptional( + configs, + OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, + DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED)); + extractGroupsEnabled = + Boolean.parseBoolean( getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED)); - groupsClaimName = getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); - responseType = getOptional(configs, OIDC_RESPONSE_TYPE); - responseMode = getOptional(configs, OIDC_RESPONSE_MODE); - useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); - customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); - readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); - extractJwtAccessTokenClaims = getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); - preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); - } + groupsClaimName = + getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM); + responseType = getOptional(configs, OIDC_RESPONSE_TYPE); + responseMode = getOptional(configs, OIDC_RESPONSE_MODE); + useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean); + customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE); + readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT); + extractJwtAccessTokenClaims = + getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean); + preferredJwsAlgorithm = + Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null)); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java index fd0a2e1877154e..39a65a46cbf919 100644 --- 
a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java @@ -10,15 +10,15 @@ import org.pac4j.oidc.credentials.OidcCredentials; import org.pac4j.oidc.profile.OidcProfileDefinition; - /** * Implementation of {@link SsoProvider} supporting the OIDC protocol. * - * This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC related - * configuration options, which reside in an instance of {@link OidcConfigs}. + *

This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC + * related configuration options, which reside in an instance of {@link OidcConfigs}. * - * It is responsible for initializing this client from a configuration object ({@link OidcConfigs}. Note that - * this class is not related to the logic performed when an IdP performs a callback to DataHub. + *

It is responsible for initializing this client from a configuration object ({@link + * OidcConfigs}. Note that this class is not related to the logic performed when an IdP performs a + * callback to DataHub. */ @Slf4j public class OidcProvider implements SsoProvider { @@ -53,7 +53,8 @@ private Client createPac4jClient() { oidcConfiguration.setClientId(_oidcConfigs.getClientId()); oidcConfiguration.setSecret(_oidcConfigs.getClientSecret()); oidcConfiguration.setDiscoveryURI(_oidcConfigs.getDiscoveryUri()); - oidcConfiguration.setClientAuthenticationMethodAsString(_oidcConfigs.getClientAuthenticationMethod()); + oidcConfiguration.setClientAuthenticationMethodAsString( + _oidcConfigs.getClientAuthenticationMethod()); oidcConfiguration.setScope(_oidcConfigs.getScope()); try { oidcConfiguration.setReadTimeout(Integer.parseInt(_oidcConfigs.getReadTimeout())); @@ -63,18 +64,24 @@ private Client createPac4jClient() { _oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType); _oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode); _oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce); - _oidcConfigs.getCustomParamResource() + _oidcConfigs + .getCustomParamResource() .ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value))); - _oidcConfigs.getPreferredJwsAlgorithm().ifPresent(preferred -> { - log.info("Setting preferredJwsAlgorithm: " + preferred); - oidcConfiguration.setPreferredJwsAlgorithm(preferred); - }); + _oidcConfigs + .getPreferredJwsAlgorithm() + .ifPresent( + preferred -> { + log.info("Setting preferredJwsAlgorithm: " + preferred); + oidcConfiguration.setPreferredJwsAlgorithm(preferred); + }); final CustomOidcClient oidcClient = new CustomOidcClient(oidcConfiguration); oidcClient.setName(OIDC_CLIENT_NAME); - oidcClient.setCallbackUrl(_oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath()); + oidcClient.setCallbackUrl( + _oidcConfigs.getAuthBaseUrl() + 
_oidcConfigs.getAuthBaseCallbackPath()); oidcClient.setCallbackUrlResolver(new PathParameterCallbackUrlResolver()); - oidcClient.addAuthorizationGenerator(new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); + oidcClient.addAuthorizationGenerator( + new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs)); return oidcClient; } } diff --git a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java index 014632c17e690f..9881b5e095b781 100644 --- a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java +++ b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java @@ -1,57 +1,58 @@ package auth.sso.oidc; +import static play.mvc.Results.internalServerError; +import static play.mvc.Results.unauthorized; + +import java.util.Optional; import org.pac4j.play.PlayWebContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.mvc.Result; -import java.util.Optional; - -import static play.mvc.Results.internalServerError; -import static play.mvc.Results.unauthorized; - - public class OidcResponseErrorHandler { - private OidcResponseErrorHandler() { - - } - - private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); + private OidcResponseErrorHandler() {} - private static final String ERROR_FIELD_NAME = "error"; - private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; + private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler"); - public static Result handleError(final PlayWebContext context) { + private static final String ERROR_FIELD_NAME = "error"; + private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description"; - _logger.warn("OIDC responded with an error: '{}'. 
Error description: '{}'", - getError(context), - getErrorDescription(context)); + public static Result handleError(final PlayWebContext context) { - if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { - return unauthorized(String.format("Access denied. " - + "The OIDC service responded with 'Access denied'. " - + "It seems that you don't have access to this application yet. Please apply for access. \n\n" - + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " - + "Error details: '%s':'%s'", - context.getRequestParameter("error"), - context.getRequestParameter("error_description"))); - } + _logger.warn( + "OIDC responded with an error: '{}'. Error description: '{}'", + getError(context), + getErrorDescription(context)); - return internalServerError( - String.format("Internal server error. The OIDC service responded with an error: '%s'.\n" - + "Error description: '%s'", getError(context).orElse(""), getErrorDescription(context).orElse(""))); + if (getError(context).isPresent() && getError(context).get().equals("access_denied")) { + return unauthorized( + String.format( + "Access denied. " + + "The OIDC service responded with 'Access denied'. " + + "It seems that you don't have access to this application yet. Please apply for access. \n\n" + + "If you already have been assigned this application, it may be so that your OIDC request is still in action. " + + "Error details: '%s':'%s'", + context.getRequestParameter("error"), + context.getRequestParameter("error_description"))); } - public static boolean isError(final PlayWebContext context) { - return getError(context).isPresent() && !getError(context).get().isEmpty(); - } + return internalServerError( + String.format( + "Internal server error. 
The OIDC service responded with an error: '%s'.\n" + + "Error description: '%s'", + getError(context).orElse(""), getErrorDescription(context).orElse(""))); + } - public static Optional getError(final PlayWebContext context) { - return context.getRequestParameter(ERROR_FIELD_NAME); - } + public static boolean isError(final PlayWebContext context) { + return getError(context).isPresent() && !getError(context).get().isEmpty(); + } - public static Optional getErrorDescription(final PlayWebContext context) { - return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); - } + public static Optional getError(final PlayWebContext context) { + return context.getRequestParameter(ERROR_FIELD_NAME); + } + + public static Optional getErrorDescription(final PlayWebContext context) { + return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME); + } } diff --git a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java index 8c8c250fb7e639..01f8f16171d133 100644 --- a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java +++ b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java @@ -1,8 +1,8 @@ package auth.sso.oidc.custom; -import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.AuthorizationCode; import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant; +import com.nimbusds.oauth2.sdk.AuthorizationGrant; import com.nimbusds.oauth2.sdk.ParseException; import com.nimbusds.oauth2.sdk.TokenErrorResponse; import com.nimbusds.oauth2.sdk.TokenRequest; @@ -37,7 +37,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; - public class CustomOidcAuthenticator implements Authenticator { private static final Logger logger = LoggerFactory.getLogger(OidcAuthenticator.class); @@ -61,14 +60,17 @@ public CustomOidcAuthenticator(final OidcClient client) { this.client = client; // check authentication methods - final List 
metadataMethods = configuration.findProviderMetadata().getTokenEndpointAuthMethods(); + final List metadataMethods = + configuration.findProviderMetadata().getTokenEndpointAuthMethods(); - final ClientAuthenticationMethod preferredMethod = getPreferredAuthenticationMethod(configuration); + final ClientAuthenticationMethod preferredMethod = + getPreferredAuthenticationMethod(configuration); final ClientAuthenticationMethod chosenMethod; if (CommonHelper.isNotEmpty(metadataMethods)) { if (preferredMethod != null) { - if (ClientAuthenticationMethod.NONE.equals(preferredMethod) || metadataMethods.contains(preferredMethod)) { + if (ClientAuthenticationMethod.NONE.equals(preferredMethod) + || metadataMethods.contains(preferredMethod)) { chosenMethod = preferredMethod; } else { throw new TechnicalException( @@ -83,8 +85,10 @@ public CustomOidcAuthenticator(final OidcClient client) { chosenMethod = firstSupportedMethod(metadataMethods); } } else { - chosenMethod = preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); - logger.info("Provider metadata does not provide Token endpoint authentication methods. Using: {}", + chosenMethod = + preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault(); + logger.info( + "Provider metadata does not provide Token endpoint authentication methods. Using: {}", chosenMethod); } @@ -103,38 +107,41 @@ public CustomOidcAuthenticator(final OidcClient client) { } /** - * The preferred {@link ClientAuthenticationMethod} specified in the given - * {@link OidcConfiguration}, or null meaning that the a - * provider-supported method should be chosen. + * The preferred {@link ClientAuthenticationMethod} specified in the given {@link + * OidcConfiguration}, or null meaning that the a provider-supported method should be + * chosen. 
*/ - private static ClientAuthenticationMethod getPreferredAuthenticationMethod(OidcConfiguration config) { + private static ClientAuthenticationMethod getPreferredAuthenticationMethod( + OidcConfiguration config) { final ClientAuthenticationMethod configurationMethod = config.getClientAuthenticationMethod(); if (configurationMethod == null) { return null; } if (!SUPPORTED_METHODS.contains(configurationMethod)) { - throw new TechnicalException("Configured authentication method (" + configurationMethod + ") is not supported."); + throw new TechnicalException( + "Configured authentication method (" + configurationMethod + ") is not supported."); } return configurationMethod; } /** - * The first {@link ClientAuthenticationMethod} from the given list of - * methods that is supported by this implementation. + * The first {@link ClientAuthenticationMethod} from the given list of methods that is supported + * by this implementation. * - * @throws TechnicalException - * if none of the provider-supported methods is supported. + * @throws TechnicalException if none of the provider-supported methods is supported. 
*/ - private static ClientAuthenticationMethod firstSupportedMethod(final List metadataMethods) { + private static ClientAuthenticationMethod firstSupportedMethod( + final List metadataMethods) { Optional firstSupported = metadataMethods.stream().filter((m) -> SUPPORTED_METHODS.contains(m)).findFirst(); if (firstSupported.isPresent()) { return firstSupported.get(); } else { - throw new TechnicalException("None of the Token endpoint provider metadata authentication methods are supported: " - + metadataMethods); + throw new TechnicalException( + "None of the Token endpoint provider metadata authentication methods are supported: " + + metadataMethods); } } @@ -145,21 +152,30 @@ public void validate(final OidcCredentials credentials, final WebContext context if (code != null) { try { final String computedCallbackUrl = client.computeFinalCallbackUrl(context); - CodeVerifier verifier = (CodeVerifier) configuration.getValueRetriever() - .retrieve(client.getCodeVerifierSessionAttributeName(), client, context).orElse(null); + CodeVerifier verifier = + (CodeVerifier) + configuration + .getValueRetriever() + .retrieve(client.getCodeVerifierSessionAttributeName(), client, context) + .orElse(null); // Token request - final TokenRequest request = createTokenRequest(new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier)); + final TokenRequest request = + createTokenRequest( + new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier)); HTTPRequest tokenHttpRequest = request.toHTTPRequest(); tokenHttpRequest.setConnectTimeout(configuration.getConnectTimeout()); tokenHttpRequest.setReadTimeout(configuration.getReadTimeout()); final HTTPResponse httpResponse = tokenHttpRequest.send(); - logger.debug("Token response: status={}, content={}", httpResponse.getStatusCode(), + logger.debug( + "Token response: status={}, content={}", + httpResponse.getStatusCode(), httpResponse.getContent()); final TokenResponse response = 
OIDCTokenResponseParser.parse(httpResponse); if (response instanceof TokenErrorResponse) { - throw new TechnicalException("Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); + throw new TechnicalException( + "Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject()); } logger.debug("Token response successful"); final OIDCTokenResponse tokenSuccessResponse = (OIDCTokenResponse) response; @@ -178,11 +194,15 @@ public void validate(final OidcCredentials credentials, final WebContext context private TokenRequest createTokenRequest(final AuthorizationGrant grant) { if (clientAuthentication != null) { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - this.clientAuthentication, grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + this.clientAuthentication, + grant); } else { - return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(), - new ClientID(configuration.getClientId()), grant); + return new TokenRequest( + configuration.findProviderMetadata().getTokenEndpointURI(), + new ClientID(configuration.getClientId()), + grant); } } } diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 24183f5c625da9..4d40f45cd09b48 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -3,7 +3,6 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ObjectNode; - import java.nio.charset.StandardCharsets; import java.util.Objects; import javax.annotation.Nonnull; @@ -17,17 +16,16 @@ import org.apache.http.util.EntityUtils; import play.mvc.Http; - -/** - * This class is responsible for coordinating authentication with the backend Metadata Service. 
- */ +/** This class is responsible for coordinating authentication with the backend Metadata Service. */ @Slf4j public class AuthServiceClient { private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser"; private static final String SIGN_UP_ENDPOINT = "auth/signUp"; - private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; - private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; + private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/resetNativeUserCredentials"; + private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = + "auth/verifyNativeUserCredentials"; private static final String TRACK_ENDPOINT = "auth/track"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; @@ -39,7 +37,8 @@ public class AuthServiceClient { private static final String INVITE_TOKEN_FIELD = "inviteToken"; private static final String RESET_TOKEN_FIELD = "resetToken"; private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated"; - private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset"; + private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = + "areNativeUserCredentialsReset"; private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch"; private final String metadataServiceHost; @@ -48,8 +47,11 @@ public class AuthServiceClient { private final Authentication systemAuthentication; private final CloseableHttpClient httpClient; - public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, - @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication, + public AuthServiceClient( + @Nonnull final String metadataServiceHost, + @Nonnull final Integer metadataServicePort, + @Nonnull final Boolean 
useSsl, + @Nonnull final Authentication systemAuthentication, @Nonnull final CloseableHttpClient httpClient) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); @@ -59,10 +61,11 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin } /** - * Call the Auth Service to generate a session token for a particular user with a unique actor id, or throws an exception if generation fails. + * Call the Auth Service to generate a session token for a particular user with a unique actor id, + * or throws an exception if generation fails. * - * Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of an Actor of type - * USER. + *

Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of + * an Actor of type USER. */ @Nonnull public String generateSessionTokenForUser(@Nonnull final String userId) { @@ -72,15 +75,21 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - GENERATE_SESSION_TOKEN_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + GENERATE_SESSION_TOKEN_ENDPOINT)); // Build JSON request to generate a token on behalf of a user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_ID_FIELD, userId); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -94,7 +103,8 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { return getAccessTokenFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", + String.format( + "Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { @@ -110,11 +120,14 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { } } - /** - * Call the Auth Service to create a native Datahub user. 
- */ - public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email, - @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) { + /** Call the Auth Service to create a native Datahub user. */ + public boolean signUp( + @Nonnull final String userUrn, + @Nonnull final String fullName, + @Nonnull final String email, + @Nonnull final String title, + @Nonnull final String password, + @Nonnull final String inviteToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(fullName, "fullName must not be null"); Objects.requireNonNull(email, "email must not be null"); @@ -126,9 +139,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - SIGN_UP_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, SIGN_UP_ENDPOINT)); // Build JSON request to sign up a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -139,7 +154,8 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN objectNode.put(TITLE_FIELD, title); objectNode.put(PASSWORD_FIELD, password); objectNode.put(INVITE_TOKEN_FIELD, inviteToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. 
@@ -152,11 +168,15 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN final String jsonStr = EntityUtils.toString(entity); return getIsNativeUserCreatedFromJson(jsonStr); } else { - String content = response.getEntity().getContent() == null ? "" : new String( - response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); + String content = + response.getEntity().getContent() == null + ? "" + : new String( + response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8); throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s Body: %s", response.getStatusLine().toString(), - response.getEntity().toString(), content)); + String.format( + "Bad response from the Metadata Service: %s %s Body: %s", + response.getStatusLine().toString(), response.getEntity().toString(), content)); } } catch (Exception e) { throw new RuntimeException(String.format("Failed to create user %s", userUrn), e); @@ -171,10 +191,10 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN } } - /** - * Call the Auth Service to reset credentials for a native DataHub user. - */ - public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password, + /** Call the Auth Service to reset credentials for a native DataHub user. */ + public boolean resetNativeUserCredentials( + @Nonnull final String userUrn, + @Nonnull final String password, @Nonnull final String resetToken) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); @@ -184,9 +204,14 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul try { final String protocol = this.metadataServiceUseSsl ? 
"https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + RESET_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); @@ -194,7 +219,8 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); objectNode.put(RESET_TOKEN_FIELD, resetToken); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. @@ -208,8 +234,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul return getAreNativeUserCredentialsResetFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to reset credentials for user", e); @@ -224,10 +251,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul } } - /** - * Call the Auth Service to verify the credentials for a native Datahub user. 
- */ - public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { + /** Call the Auth Service to verify the credentials for a native Datahub user. */ + public boolean verifyNativeUserCredentials( + @Nonnull final String userUrn, @Nonnull final String password) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); CloseableHttpResponse response = null; @@ -235,16 +261,22 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, + this.metadataServiceHost, + this.metadataServicePort, + VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT)); // Build JSON request to verify credentials for a native user. final ObjectMapper objectMapper = new ObjectMapper(); final ObjectNode objectNode = objectMapper.createObjectNode(); objectNode.put(USER_URN_FIELD, userUrn); objectNode.put(PASSWORD_FIELD, password); - final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); + final String json = + objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode); request.setEntity(new StringEntity(json, StandardCharsets.UTF_8)); // Add authorization header with DataHub frontend system id and secret. 
@@ -258,8 +290,9 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu return getDoesPasswordMatchFromJson(jsonStr); } else { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to verify credentials for user", e); @@ -274,18 +307,18 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu } } - /** - * Call the Auth Service to track an analytics event - */ + /** Call the Auth Service to track an analytics event */ public void track(@Nonnull final String event) { Objects.requireNonNull(event, "event must not be null"); CloseableHttpResponse response = null; try { final String protocol = this.metadataServiceUseSsl ? "https" : "http"; - final HttpPost request = new HttpPost( - String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, - TRACK_ENDPOINT)); + final HttpPost request = + new HttpPost( + String.format( + "%s://%s:%s/%s", + protocol, this.metadataServiceHost, this.metadataServicePort, TRACK_ENDPOINT)); // Build JSON request to track event. 
request.setEntity(new StringEntity(event, StandardCharsets.UTF_8)); @@ -298,8 +331,9 @@ public void track(@Nonnull final String event) { if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) { throw new RuntimeException( - String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), - response.getEntity().toString())); + String.format( + "Bad response from the Metadata Service: %s %s", + response.getStatusLine().toString(), response.getEntity().toString())); } } catch (Exception e) { throw new RuntimeException("Failed to track event", e); diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java index 59e91a6d5a0f7f..b7173684b63500 100644 --- a/datahub-frontend/app/client/KafkaTrackingProducer.java +++ b/datahub-frontend/app/client/KafkaTrackingProducer.java @@ -3,6 +3,15 @@ import com.linkedin.metadata.config.kafka.ProducerConfiguration; import com.typesafe.config.Config; import config.ConfigurationProvider; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; +import javax.inject.Inject; +import javax.inject.Singleton; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -15,98 +24,141 @@ import play.api.inject.ApplicationLifecycle; import utils.ConfigUtil; -import javax.inject.Inject; - -import javax.annotation.Nonnull; -import javax.inject.Singleton; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Properties; -import java.util.concurrent.CompletableFuture; - @Singleton public class KafkaTrackingProducer { - private final Logger _logger = 
LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); - private static final List KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList( - Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), - SecurityProtocol.SASL_PLAINTEXT.name())); - - private final Boolean _isEnabled; - private final KafkaProducer _producer; - - @Inject - public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle, final ConfigurationProvider configurationProvider) { - _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); - - if (_isEnabled) { - _logger.debug("Analytics tracking is enabled"); - _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); - - lifecycle.addStopHook( - () -> { - _producer.flush(); - _producer.close(); - return CompletableFuture.completedFuture(null); - }); - } else { - _logger.debug("Analytics tracking is disabled"); - _producer = null; - } - } - - public Boolean isEnabled() { - return _isEnabled; + private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName()); + private static final List KAFKA_SSL_PROTOCOLS = + Collections.unmodifiableList( + Arrays.asList( + SecurityProtocol.SSL.name(), + SecurityProtocol.SASL_SSL.name(), + SecurityProtocol.SASL_PLAINTEXT.name())); + + private final Boolean _isEnabled; + private final KafkaProducer _producer; + + @Inject + public KafkaTrackingProducer( + @Nonnull Config config, + ApplicationLifecycle lifecycle, + final ConfigurationProvider configurationProvider) { + _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled"); + + if (_isEnabled) { + _logger.debug("Analytics tracking is enabled"); + _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer()); + + lifecycle.addStopHook( + () -> { + _producer.flush(); + _producer.close(); + return CompletableFuture.completedFuture(null); + }); + } else { + 
_logger.debug("Analytics tracking is disabled"); + _producer = null; } - - public void send(ProducerRecord record) { - _producer.send(record); + } + + public Boolean isEnabled() { + return _isEnabled; + } + + public void send(ProducerRecord record) { + _producer.send(record); + } + + private static KafkaProducer createKafkaProducer( + Config config, ProducerConfiguration producerConfiguration) { + final Properties props = new Properties(); + props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); + props.put( + ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, + config.getString("analytics.kafka.delivery.timeout.ms")); + props.put( + ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, + config.getString("analytics.kafka.bootstrap.server")); + props.put( + ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. + props.put( + ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, + "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. 
+ props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); + props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); + + final String securityProtocolConfig = "analytics.kafka.security.protocol"; + if (config.hasPath(securityProtocolConfig) + && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { + props.put( + CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); + setConfig( + config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); + + setConfig( + config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.keystore.location"); + setConfig( + config, + props, + SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.keystore.password"); + + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, + "analytics.kafka.ssl.truststore.type"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, + "analytics.kafka.ssl.truststore.location"); + setConfig( + config, + props, + SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, + "analytics.kafka.ssl.truststore.password"); + + setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); + setConfig( + config, + props, + SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, + "analytics.kafka.ssl.endpoint.identification.algorithm"); + + final String securityProtocol = config.getString(securityProtocolConfig); + if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) + || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { + setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); + setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); + setConfig( + config, + props, + 
SaslConfigs.SASL_KERBEROS_SERVICE_NAME, + "analytics.kafka.sasl.kerberos.service.name"); + setConfig( + config, + props, + SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.login.callback.handler.class"); + setConfig( + config, + props, + SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, + "analytics.kafka.sasl.client.callback.handler.class"); + } } - private static KafkaProducer createKafkaProducer(Config config, ProducerConfiguration producerConfiguration) { - final Properties props = new Properties(); - props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend"); - props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms")); - props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server")); - props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn. - props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object. 
- props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize()); - props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType()); - - final String securityProtocolConfig = "analytics.kafka.security.protocol"; - if (config.hasPath(securityProtocolConfig) - && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) { - props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig)); - setConfig(config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password"); - - setConfig(config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.keystore.location"); - setConfig(config, props, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.keystore.password"); - - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "analytics.kafka.ssl.truststore.type"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.truststore.location"); - setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.truststore.password"); - - setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol"); - setConfig(config, props, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "analytics.kafka.ssl.endpoint.identification.algorithm"); - - final String securityProtocol = config.getString(securityProtocolConfig); - if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name()) - || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) { - setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism"); - setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config"); - setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, 
"analytics.kafka.sasl.kerberos.service.name"); - setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class"); - setConfig(config, props, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class"); - } - } - - return new org.apache.kafka.clients.producer.KafkaProducer(props); - } + return new org.apache.kafka.clients.producer.KafkaProducer(props); + } - private static void setConfig(Config config, Properties props, String key, String configKey) { - Optional.ofNullable(ConfigUtil.getString(config, configKey, null)) - .ifPresent(v -> props.put(key, v)); - } + private static void setConfig(Config config, Properties props, String key, String configKey) { + Optional.ofNullable(ConfigUtil.getString(config, configKey, null)) + .ifPresent(v -> props.put(key, v)); + } } diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java index 8f526c831b5c9b..3d87267f8ebe38 100644 --- a/datahub-frontend/app/config/ConfigurationProvider.java +++ b/datahub-frontend/app/config/ConfigurationProvider.java @@ -4,28 +4,22 @@ import com.linkedin.metadata.config.kafka.KafkaConfiguration; import com.linkedin.metadata.spring.YamlPropertySourceFactory; import lombok.Data; - import org.springframework.boot.context.properties.ConfigurationProperties; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.PropertySource; - /** - * Minimal sharing between metadata-service and frontend - * Does not use the factories module to avoid transitive dependencies. + * Minimal sharing between metadata-service and frontend Does not use the factories module to avoid + * transitive dependencies. 
*/ @EnableConfigurationProperties @PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class) @ConfigurationProperties @Data public class ConfigurationProvider { - /** - * Kafka related configs. - */ - private KafkaConfiguration kafka; + /** Kafka related configs. */ + private KafkaConfiguration kafka; - /** - * Configuration for caching - */ - private CacheConfiguration cache; + /** Configuration for caching */ + private CacheConfiguration cache; } diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java index 5c76f2572a9360..60971bf06e27bd 100644 --- a/datahub-frontend/app/controllers/Application.java +++ b/datahub-frontend/app/controllers/Application.java @@ -1,5 +1,8 @@ package controllers; +import static auth.AuthUtils.ACTOR; +import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME; + import akka.actor.ActorSystem; import akka.stream.ActorMaterializer; import akka.stream.Materializer; @@ -9,41 +12,35 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import com.linkedin.util.Pair; import com.typesafe.config.Config; - +import java.io.InputStream; +import java.time.Duration; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.stream.Collectors; - +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import javax.inject.Inject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import play.Environment; import play.http.HttpEntity; +import play.libs.Json; import play.libs.ws.InMemoryBodyWritable; import play.libs.ws.StandaloneWSClient; -import play.libs.Json; import play.libs.ws.ahc.StandaloneAhcWSClient; import play.mvc.Controller; import play.mvc.Http; import play.mvc.ResponseHeader; import play.mvc.Result; -import javax.annotation.Nonnull; -import javax.annotation.Nullable; -import javax.inject.Inject; -import 
java.io.InputStream; import play.mvc.Security; import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient; import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig; import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient; import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig; import utils.ConfigUtil; -import java.time.Duration; - -import static auth.AuthUtils.ACTOR; -import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME; - public class Application extends Controller { private final Logger _logger = LoggerFactory.getLogger(Application.class.getName()); @@ -61,22 +58,17 @@ public Application(Environment environment, @Nonnull Config config) { /** * Serves the build output index.html for any given path * - * @param path takes a path string, which essentially is ignored - * routing is managed client side + * @param path takes a path string, which essentially is ignored routing is managed client side * @return {Result} build output index.html resource */ @Nonnull private Result serveAsset(@Nullable String path) { try { InputStream indexHtml = _environment.resourceAsStream("public/index.html"); - return ok(indexHtml) - .withHeader("Cache-Control", "no-cache") - .as("text/html"); + return ok(indexHtml).withHeader("Cache-Control", "no-cache").as("text/html"); } catch (Exception e) { _logger.warn("Cannot load public/index.html resource. Static assets or assets jar missing?"); - return notFound() - .withHeader("Cache-Control", "no-cache") - .as("text/html"); + return notFound().withHeader("Cache-Control", "no-cache").as("text/html"); } } @@ -99,66 +91,87 @@ public Result index(@Nullable String path) { /** * Proxies requests to the Metadata Service * - * TODO: Investigate using mutual SSL authentication to call Metadata Service. + *

TODO: Investigate using mutual SSL authentication to call Metadata Service. */ @Security.Authenticated(Authenticator.class) - public CompletableFuture proxy(String path, Http.Request request) throws ExecutionException, InterruptedException { + public CompletableFuture proxy(String path, Http.Request request) + throws ExecutionException, InterruptedException { final String authorizationHeaderValue = getAuthorizationHeaderValueToProxy(request); final String resolvedUri = mapPath(request.uri()); - final String metadataServiceHost = ConfigUtil.getString( - _config, - ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); - final int metadataServicePort = ConfigUtil.getInt( - _config, - ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); - final boolean metadataServiceUseSsl = ConfigUtil.getBoolean( - _config, - ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, - ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL - ); + final String metadataServiceHost = + ConfigUtil.getString( + _config, + ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_HOST); + final int metadataServicePort = + ConfigUtil.getInt( + _config, + ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_PORT); + final boolean metadataServiceUseSsl = + ConfigUtil.getBoolean( + _config, + ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH, + ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL); // TODO: Fully support custom internal SSL. final String protocol = metadataServiceUseSsl ? 
"https" : "http"; final Map> headers = request.getHeaders().toMap(); - if (headers.containsKey(Http.HeaderNames.HOST) && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { - headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); + if (headers.containsKey(Http.HeaderNames.HOST) + && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) { + headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST)); } - return _ws.url(String.format("%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) + return _ws.url( + String.format( + "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri)) .setMethod(request.method()) - .setHeaders(headers - .entrySet() - .stream() - // Remove X-DataHub-Actor to prevent malicious delegation. - .filter(entry -> !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) - // Remove Host s.th. service meshes do not route to wrong host - .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) - .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)) - ) + .setHeaders( + headers.entrySet().stream() + // Remove X-DataHub-Actor to prevent malicious delegation. + .filter( + entry -> + !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase( + entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey())) + // Remove Host s.th. 
service meshes do not route to wrong host + .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey())) + .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))) .addHeader(Http.HeaderNames.AUTHORIZATION, authorizationHeaderValue) - .addHeader(AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) - .setBody(new InMemoryBodyWritable(ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), "application/json")) + .addHeader( + AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request)) + .setBody( + new InMemoryBodyWritable( + ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), + "application/json")) .setRequestTimeout(Duration.ofSeconds(120)) .execute() - .thenApply(apiResponse -> { - final ResponseHeader header = new ResponseHeader(apiResponse.getStatus(), apiResponse.getHeaders() - .entrySet() - .stream() - .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) - .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) - .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) - .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); - final HttpEntity body = new HttpEntity.Strict(apiResponse.getBodyAsBytes(), Optional.ofNullable(apiResponse.getContentType())); - return new Result(header, body); - }).toCompletableFuture(); + .thenApply( + apiResponse -> { + final ResponseHeader header = + new ResponseHeader( + apiResponse.getStatus(), + apiResponse.getHeaders().entrySet().stream() + .filter( + entry -> + !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey())) + .filter( + entry -> + !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey())) + .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue()))) + .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond))); + final HttpEntity body = + new HttpEntity.Strict( + apiResponse.getBodyAsBytes(), 
+ Optional.ofNullable(apiResponse.getContentType())); + return new Result(header, body); + }) + .toCompletableFuture(); } /** @@ -173,11 +186,13 @@ public Result appConfig() { config.put("appVersion", _config.getString("app.version")); config.put("isInternal", _config.getBoolean("linkedin.internal")); config.put("shouldShowDatasetLineage", _config.getBoolean("linkedin.show.dataset.lineage")); - config.put("suggestionConfidenceThreshold", + config.put( + "suggestionConfidenceThreshold", Integer.valueOf(_config.getString("linkedin.suggestion.confidence.threshold"))); config.set("wikiLinks", wikiLinks()); config.set("tracking", trackingInfo()); - // In a staging environment, we can trigger this flag to be true so that the UI can handle based on + // In a staging environment, we can trigger this flag to be true so that the UI can handle based + // on // such config and alert users that their changes will not affect production data config.put("isStagingBanner", _config.getBoolean("ui.show.staging.banner")); config.put("isLiveDataWarning", _config.getBoolean("ui.show.live.data.banner")); @@ -206,6 +221,7 @@ public Result appConfig() { /** * Creates a JSON object of profile / avatar properties + * * @return Json avatar / profile image properties */ @Nonnull @@ -273,23 +289,26 @@ private StandaloneWSClient createWsClient() { } /** - * Returns the value of the Authorization Header to be provided when proxying requests to the downstream Metadata Service. + * Returns the value of the Authorization Header to be provided when proxying requests to the + * downstream Metadata Service. * - * Currently, the Authorization header value may be derived from + *

Currently, the Authorization header value may be derived from * - * a) The value of the "token" attribute of the Session Cookie provided by the client. This value is set - * when creating the session token initially from a token granted by the Metadata Service. + *

a) The value of the "token" attribute of the Session Cookie provided by the client. This + * value is set when creating the session token initially from a token granted by the Metadata + * Service. * - * Or if the "token" attribute cannot be found in a session cookie, then we fallback to + *

Or if the "token" attribute cannot be found in a session cookie, then we fallback to * - * b) The value of the Authorization - * header provided in the original request. This will be used in cases where clients are making programmatic requests - * to Metadata Service APIs directly, without providing a session cookie (ui only). + *

b) The value of the Authorization header provided in the original request. This will be used + * in cases where clients are making programmatic requests to Metadata Service APIs directly, + * without providing a session cookie (ui only). * - * If neither are found, an empty string is returned. + *

If neither are found, an empty string is returned. */ private String getAuthorizationHeaderValueToProxy(Http.Request request) { - // If the session cookie has an authorization token, use that. If there's an authorization header provided, simply + // If the session cookie has an authorization token, use that. If there's an authorization + // header provided, simply // use that. String value = ""; if (request.session().data().containsKey(SESSION_COOKIE_GMS_TOKEN_NAME)) { @@ -301,11 +320,13 @@ private String getAuthorizationHeaderValueToProxy(Http.Request request) { } /** - * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This is sent along - * with any requests that have a valid frontend session cookie to identify the calling actor, for backwards compatibility. + * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This + * is sent along with any requests that have a valid frontend session cookie to identify the + * calling actor, for backwards compatibility. * - * If Metadata Service authentication is enabled, this value is not required because Actor context will most often come - * from the authentication credentials provided in the Authorization header. + *

If Metadata Service authentication is enabled, this value is not required because Actor + * context will most often come from the authentication credentials provided in the Authorization + * header. */ private String getDataHubActorHeader(Http.Request request) { String actor = request.session().data().get(ACTOR); diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index e28d4ba2ee37eb..9c232e965a0034 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -1,5 +1,9 @@ package controllers; +import static auth.AuthUtils.*; +import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; +import static org.pac4j.play.store.PlayCookieSessionStore.*; + import auth.AuthUtils; import auth.CookieConfigs; import auth.JAASConfigs; @@ -35,325 +39,337 @@ import play.mvc.Results; import security.AuthenticationManager; -import static auth.AuthUtils.*; -import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX; -import static org.pac4j.play.store.PlayCookieSessionStore.*; - - // TODO add logging. 
public class AuthenticationController extends Controller { - public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; - private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; - private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; - private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; - - private static final String SSO_NO_REDIRECT_MESSAGE = "SSO is configured, however missing redirect from idp"; - - private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); - private final CookieConfigs _cookieConfigs; - private final JAASConfigs _jaasConfigs; - private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; - private final boolean _verbose; - - @Inject - private org.pac4j.core.config.Config _ssoConfig; - - @Inject - private PlaySessionStore _playSessionStore; - - @Inject - private SsoManager _ssoManager; - - @Inject - AuthServiceClient _authClient; - - @Inject - public AuthenticationController(@Nonnull Config configs) { - _cookieConfigs = new CookieConfigs(configs); - _jaasConfigs = new JAASConfigs(configs); - _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); - _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging"; + private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri"; + private static final String ERROR_MESSAGE_URI_PARAM = "error_msg"; + private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured"; + + private static final String SSO_NO_REDIRECT_MESSAGE = + "SSO is configured, however missing redirect from idp"; + + private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName()); + private final CookieConfigs _cookieConfigs; + private final JAASConfigs _jaasConfigs; + private final NativeAuthenticationConfigs _nativeAuthenticationConfigs; 
+ private final boolean _verbose; + + @Inject private org.pac4j.core.config.Config _ssoConfig; + + @Inject private PlaySessionStore _playSessionStore; + + @Inject private SsoManager _ssoManager; + + @Inject AuthServiceClient _authClient; + + @Inject + public AuthenticationController(@Nonnull Config configs) { + _cookieConfigs = new CookieConfigs(configs); + _jaasConfigs = new JAASConfigs(configs); + _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs); + _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING); + } + + /** + * Route used to perform authentication, or redirect to log in if authentication fails. + * + *

If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider + * (Indirect auth). If not, we will fall back to the default username / password login experience + * (Direct auth). + */ + @Nonnull + public Result authenticate(Http.Request request) { + + // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is + // authenticated. + + final Optional maybeRedirectPath = + Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); + final String redirectPath = maybeRedirectPath.orElse("/"); + + if (AuthUtils.hasValidSessionCookie(request)) { + return Results.redirect(redirectPath); } - /** - * Route used to perform authentication, or redirect to log in if authentication fails. - * - * If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider (Indirect auth). - * If not, we will fall back to the default username / password login experience (Direct auth). - */ - @Nonnull - public Result authenticate(Http.Request request) { - - // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is authenticated. - - final Optional maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM)); - final String redirectPath = maybeRedirectPath.orElse("/"); - - if (AuthUtils.hasValidSessionCookie(request)) { - return Results.redirect(redirectPath); - } - - // 1. If SSO is enabled, redirect to IdP if not authenticated. - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, redirectPath).orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - - // 2. 
If either JAAS auth or Native auth is enabled, fallback to it - if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { - return Results.redirect( - LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); - } - - // 3. If no auth enabled, fallback to using default user account & redirect. - // Generate GMS session token, TODO: - final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); - return Results.redirect(redirectPath).withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) - .withCookies( - createActorCookie( - DEFAULT_ACTOR_URN.toString(), - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); + // 1. If SSO is enabled, redirect to IdP if not authenticated. + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, redirectPath) + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); } - /** - * Redirect to the identity provider for authentication. - */ - @Nonnull - public Result sso(Http.Request request) { - if (_ssoManager.isSsoEnabled()) { - return redirectToIdentityProvider(request, "/").orElse( - Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)) - ); - } - return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + // 2. If either JAAS auth or Native auth is enabled, fallback to it + if (_jaasConfigs.isJAASEnabled() + || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) { + return Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath))); } - /** - * Log in a user based on a username + password. - * - * TODO: Implement built-in support for LDAP auth. 
Currently dummy jaas authentication is the default. - */ - @Nonnull - public Result logIn(Http.Request request) { - boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); - _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; - if (noAuthEnabled) { - String message = "Neither JAAS nor native authentication is enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } - - final JsonNode json = request.body().asJson(); - final String username = json.findPath(USER_NAME).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - - if (StringUtils.isBlank(username)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); - return Results.badRequest(invalidCredsJson); - } - - JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); - boolean loginSucceeded = tryLogin(username, password); - - if (!loginSucceeded) { - return Results.badRequest(invalidCredsJson); - } - - final Urn actorUrn = new CorpuserUrn(username); - final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); - return createSession(actorUrn.toString(), accessToken); + // 3. If no auth enabled, fallback to using default user account & redirect. 
+ // Generate GMS session token, TODO: + final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId()); + return Results.redirect(redirectPath) + .withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken)) + .withCookies( + createActorCookie( + DEFAULT_ACTOR_URN.toString(), + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } + + /** Redirect to the identity provider for authentication. */ + @Nonnull + public Result sso(Http.Request request) { + if (_ssoManager.isSsoEnabled()) { + return redirectToIdentityProvider(request, "/") + .orElse( + Results.redirect( + LOGIN_ROUTE + + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))); + } + return Results.redirect( + LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE)); + } + + /** + * Log in a user based on a username + password. + * + *

TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the + * default. + */ + @Nonnull + public Result logIn(Http.Request request) { + boolean jaasEnabled = _jaasConfigs.isJAASEnabled(); + _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled)); + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled; + if (noAuthEnabled) { + String message = "Neither JAAS nor native authentication is enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); } - /** - * Sign up a native user based on a name, email, title, and password. The invite token must match an existing invite token. - * - */ - @Nonnull - public Result signUp(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return Results.badRequest(error); - } + final JsonNode json = request.body().asJson(); + final String username = json.findPath(USER_NAME).textValue(); + final String password = json.findPath(PASSWORD).textValue(); - final JsonNode json = request.body().asJson(); - final String fullName = json.findPath(FULL_NAME).textValue(); - final String email = json.findPath(EMAIL).textValue(); - final String title = json.findPath(TITLE).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); + if 
(StringUtils.isBlank(username)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(fullName)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); - return Results.badRequest(invalidCredsJson); - } + JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials"); + boolean loginSucceeded = tryLogin(username, password); - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { - Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); - if (!emailValidator.isValid(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } - } + if (!loginSucceeded) { + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final Urn actorUrn = new CorpuserUrn(username); + final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); + return createSession(actorUrn.toString(), accessToken); + } + + /** + * Sign up a native user based on a name, email, title, and password. The invite token must match + * an existing invite token. 
+ */ + @Nonnull + public Result signUp(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return Results.badRequest(error); + } - if (StringUtils.isBlank(title)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String fullName = json.findPath(FULL_NAME).textValue(); + final String email = json.findPath(EMAIL).textValue(); + final String title = json.findPath(TITLE).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String inviteToken = json.findPath(INVITE_TOKEN).textValue(); - if (StringUtils.isBlank(inviteToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(fullName)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } + if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) { + 
Constraints.EmailValidator emailValidator = new Constraints.EmailValidator(); + if (!emailValidator.isValid(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); + } } - /** - * Reset a native user's credentials based on a username, old password, and new password. - * - */ - @Nonnull - public Result resetNativeUserCredentials(Http.Request request) { - boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); - _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); - if (!nativeAuthenticationEnabled) { - String message = "Native authentication is not enabled on the server."; - final ObjectNode error = Json.newObject(); - error.put("message", message); - return badRequest(error); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); + } - final JsonNode json = request.body().asJson(); - final String email = json.findPath(EMAIL).textValue(); - final String password = json.findPath(PASSWORD).textValue(); - final String resetToken = json.findPath(RESET_TOKEN).textValue(); + if (StringUtils.isBlank(title)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(email)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); - return Results.badRequest(invalidCredsJson); - } + if (StringUtils.isBlank(inviteToken)) { + JsonNode invalidCredsJson = + Json.newObject().put("message", "Invite token must not be empty."); + return Results.badRequest(invalidCredsJson); + } - if (StringUtils.isBlank(password)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); - return 
Results.badRequest(invalidCredsJson); - } + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + /** Reset a native user's credentials based on a username, old password, and new password. */ + @Nonnull + public Result resetNativeUserCredentials(Http.Request request) { + boolean nativeAuthenticationEnabled = + _nativeAuthenticationConfigs.isNativeAuthenticationEnabled(); + _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled)); + if (!nativeAuthenticationEnabled) { + String message = "Native authentication is not enabled on the server."; + final ObjectNode error = Json.newObject(); + error.put("message", message); + return badRequest(error); + } - if (StringUtils.isBlank(resetToken)) { - JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); - return Results.badRequest(invalidCredsJson); - } + final JsonNode json = request.body().asJson(); + final String email = json.findPath(EMAIL).textValue(); + final String password = json.findPath(PASSWORD).textValue(); + final String resetToken = json.findPath(RESET_TOKEN).textValue(); - final Urn userUrn = new CorpuserUrn(email); - final String userUrnString = userUrn.toString(); - _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); - final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return createSession(userUrnString, accessToken); + if (StringUtils.isBlank(email)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty."); + return Results.badRequest(invalidCredsJson); } - private Optional redirectToIdentityProvider(Http.RequestHeader request, String redirectPath) { - final 
PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); - final Client client = _ssoManager.getSsoProvider().client(); - configurePac4jSessionStore(playWebContext, client, redirectPath); - try { - final Optional action = client.getRedirectionAction(playWebContext); - return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); - } catch (Exception e) { - if (_verbose) { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", e); - } else { - _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); - } - return Optional.of(Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8)))); - } + if (StringUtils.isBlank(password)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty."); + return Results.badRequest(invalidCredsJson); } - private void configurePac4jSessionStore(PlayWebContext context, Client client, String redirectPath) { - // Set the originally requested path for post-auth redirection. We split off into a separate cookie from the session - // to reduce size of the session cookie - FoundAction foundAction = new FoundAction(redirectPath); - byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); - String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); - context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); - // This is to prevent previous login attempts from being cached. - // We replicate the logic here, which is buried in the Pac4j client. 
- if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) != null) { - _logger.debug("Found previous login attempt. Removing it manually to prevent unexpected errors."); - _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); - } + if (StringUtils.isBlank(resetToken)) { + JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty."); + return Results.badRequest(invalidCredsJson); } - private String encodeRedirectUri(final String redirectUri) { - return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + final Urn userUrn = new CorpuserUrn(email); + final String userUrnString = userUrn.toString(); + _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); + final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); + return createSession(userUrnString, accessToken); + } + + private Optional redirectToIdentityProvider( + Http.RequestHeader request, String redirectPath) { + final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore); + final Client client = _ssoManager.getSsoProvider().client(); + configurePac4jSessionStore(playWebContext, client, redirectPath); + try { + final Optional action = client.getRedirectionAction(playWebContext); + return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext)); + } catch (Exception e) { + if (_verbose) { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", + e); + } else { + _logger.error( + "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured"); + } + return Optional.of( + Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to redirect to Single Sign-On provider. 
Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8)))); } - - private boolean tryLogin(String username, String password) { - boolean loginSucceeded = false; - - // First try jaas login, if enabled - if (_jaasConfigs.isJAASEnabled()) { - try { - _logger.debug("Attempting jaas authentication"); - AuthenticationManager.authenticateJaasUser(username, password); - _logger.debug("Jaas authentication successful. Login succeeded"); - loginSucceeded = true; - } catch (Exception e) { - if (_verbose) { - _logger.debug("Jaas authentication error. Login failed", e); - } else { - _logger.debug("Jaas authentication error. Login failed"); - } - } - } - - // If jaas login fails or is disabled, try native auth login - if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { - final Urn userUrn = new CorpuserUrn(username); - final String userUrnString = userUrn.toString(); - loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); + } + + private void configurePac4jSessionStore( + PlayWebContext context, Client client, String redirectPath) { + // Set the originally requested path for post-auth redirection. We split off into a separate + // cookie from the session + // to reduce size of the session cookie + FoundAction foundAction = new FoundAction(redirectPath); + byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction); + String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes)); + context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized)); + // This is to prevent previous login attempts from being cached. + // We replicate the logic here, which is buried in the Pac4j client. + if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) + != null) { + _logger.debug( + "Found previous login attempt. 
Removing it manually to prevent unexpected errors."); + _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, ""); + } + } + + private String encodeRedirectUri(final String redirectUri) { + return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8); + } + + private boolean tryLogin(String username, String password) { + boolean loginSucceeded = false; + + // First try jaas login, if enabled + if (_jaasConfigs.isJAASEnabled()) { + try { + _logger.debug("Attempting jaas authentication"); + AuthenticationManager.authenticateJaasUser(username, password); + _logger.debug("Jaas authentication successful. Login succeeded"); + loginSucceeded = true; + } catch (Exception e) { + if (_verbose) { + _logger.debug("Jaas authentication error. Login failed", e); + } else { + _logger.debug("Jaas authentication error. Login failed"); } - - return loginSucceeded; + } } - private Result createSession(String userUrnString, String accessToken) { - return Results.ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies( - createActorCookie( - userUrnString, - _cookieConfigs.getTtlInHours(), - _cookieConfigs.getAuthCookieSameSite(), - _cookieConfigs.getAuthCookieSecure() - ) - ); - + // If jaas login fails or is disabled, try native auth login + if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) { + final Urn userUrn = new CorpuserUrn(username); + final String userUrnString = userUrn.toString(); + loginSucceeded = + loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password); } -} \ No newline at end of file + + return loginSucceeded; + } + + private Result createSession(String userUrnString, String accessToken) { + return Results.ok() + .withSession(createSessionMap(userUrnString, accessToken)) + .withCookies( + createActorCookie( + userUrnString, + _cookieConfigs.getTtlInHours(), + _cookieConfigs.getAuthCookieSameSite(), + _cookieConfigs.getAuthCookieSecure())); + } +} diff --git 
a/datahub-frontend/app/controllers/CentralLogoutController.java b/datahub-frontend/app/controllers/CentralLogoutController.java index 5e24fe9f8220cf..eea1c662ebf894 100644 --- a/datahub-frontend/app/controllers/CentralLogoutController.java +++ b/datahub-frontend/app/controllers/CentralLogoutController.java @@ -2,18 +2,15 @@ import com.typesafe.config.Config; import java.net.URLEncoder; +import java.nio.charset.StandardCharsets; +import javax.inject.Inject; import lombok.extern.slf4j.Slf4j; import org.pac4j.play.LogoutController; import play.mvc.Http; import play.mvc.Result; import play.mvc.Results; -import javax.inject.Inject; -import java.nio.charset.StandardCharsets; - -/** - * Responsible for handling logout logic with oidc providers - */ +/** Responsible for handling logout logic with oidc providers */ @Slf4j public class CentralLogoutController extends LogoutController { private static final String AUTH_URL_CONFIG_PATH = "/login"; @@ -28,26 +25,27 @@ public CentralLogoutController(Config config) { setLogoutUrlPattern(DEFAULT_BASE_URL_PATH + ".*"); setLocalLogout(true); setCentralLogout(true); - } - /** - * logout() method should not be called if oidc is not enabled - */ + /** logout() method should not be called if oidc is not enabled */ public Result executeLogout(Http.Request request) { if (_isOidcEnabled) { try { return logout(request).toCompletableFuture().get().withNewSession(); } catch (Exception e) { - log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e); + log.error( + "Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", + e); return redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode("Failed to sign out using Single Sign-On provider. 
Please contact your DataHub Administrator, " - + "or refer to server logs for more information.", StandardCharsets.UTF_8))) - .withNewSession(); + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, " + + "or refer to server logs for more information.", + StandardCharsets.UTF_8))) + .withNewSession(); } } - return Results.redirect(AUTH_URL_CONFIG_PATH) - .withNewSession(); + return Results.redirect(AUTH_URL_CONFIG_PATH).withNewSession(); } } diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java index 7a4b5585cc21ab..9f4445b1aa5c7f 100644 --- a/datahub-frontend/app/controllers/SsoCallbackController.java +++ b/datahub-frontend/app/controllers/SsoCallbackController.java @@ -1,6 +1,9 @@ package controllers; import auth.CookieConfigs; +import auth.sso.SsoManager; +import auth.sso.SsoProvider; +import auth.sso.oidc.OidcCallbackLogic; import client.AuthServiceClient; import com.datahub.authentication.Authentication; import com.linkedin.entity.client.SystemEntityClient; @@ -18,17 +21,13 @@ import org.pac4j.play.PlayWebContext; import play.mvc.Http; import play.mvc.Result; -import auth.sso.oidc.OidcCallbackLogic; -import auth.sso.SsoManager; -import auth.sso.SsoProvider; import play.mvc.Results; - /** * A dedicated Controller for handling redirects to DataHub by 3rd-party Identity Providers after * off-platform authentication. * - * Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines + *

Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines * the handling logic to invoke. */ @Slf4j @@ -46,56 +45,88 @@ public SsoCallbackController( _ssoManager = ssoManager; setDefaultUrl("/"); // By default, redirects to Home Page on log in. setSaveInSession(false); - setCallbackLogic(new SsoCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, new CookieConfigs(configs))); + setCallbackLogic( + new SsoCallbackLogic( + ssoManager, + systemAuthentication, + entityClient, + authClient, + new CookieConfigs(configs))); } public CompletionStage handleCallback(String protocol, Http.Request request) { if (shouldHandleCallback(protocol)) { log.debug(String.format("Handling SSO callback. Protocol: %s", protocol)); - return callback(request).handle((res, e) -> { - if (e != null) { - log.error("Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", e); - return Results.redirect( - String.format("/login?error_msg=%s", - URLEncoder.encode( - "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", - StandardCharsets.UTF_8))) - .discardingCookie("actor") - .withNewSession(); - } - return res; - }); + return callback(request) + .handle( + (res, e) -> { + if (e != null) { + log.error( + "Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", + e); + return Results.redirect( + String.format( + "/login?error_msg=%s", + URLEncoder.encode( + "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.", + StandardCharsets.UTF_8))) + .discardingCookie("actor") + .withNewSession(); + } + return res; + }); } - return CompletableFuture.completedFuture(Results.internalServerError( - String.format("Failed to perform SSO callback. 
SSO is not enabled for protocol: %s", protocol))); + return CompletableFuture.completedFuture( + Results.internalServerError( + String.format( + "Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol))); } - - /** - * Logic responsible for delegating to protocol-specific callback logic. - */ + /** Logic responsible for delegating to protocol-specific callback logic. */ public class SsoCallbackLogic implements CallbackLogic { private final OidcCallbackLogic _oidcCallbackLogic; - SsoCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication, - final SystemEntityClient entityClient, final AuthServiceClient authClient, final CookieConfigs cookieConfigs) { - _oidcCallbackLogic = new OidcCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); + SsoCallbackLogic( + final SsoManager ssoManager, + final Authentication systemAuthentication, + final SystemEntityClient entityClient, + final AuthServiceClient authClient, + final CookieConfigs cookieConfigs) { + _oidcCallbackLogic = + new OidcCallbackLogic( + ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs); } @Override - public Result perform(PlayWebContext context, Config config, - HttpActionAdapter httpActionAdapter, String defaultUrl, Boolean saveInSession, - Boolean multiProfile, Boolean renewSession, String defaultClient) { + public Result perform( + PlayWebContext context, + Config config, + HttpActionAdapter httpActionAdapter, + String defaultUrl, + Boolean saveInSession, + Boolean multiProfile, + Boolean renewSession, + String defaultClient) { if (SsoProvider.SsoProtocol.OIDC.equals(_ssoManager.getSsoProvider().protocol())) { - return _oidcCallbackLogic.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, defaultClient); + return _oidcCallbackLogic.perform( + context, + config, + httpActionAdapter, + defaultUrl, + saveInSession, + multiProfile, + renewSession, + 
defaultClient); } // Should never occur. - throw new UnsupportedOperationException("Failed to find matching SSO Provider. Only one supported is OIDC."); + throw new UnsupportedOperationException( + "Failed to find matching SSO Provider. Only one supported is OIDC."); } } private boolean shouldHandleCallback(final String protocol) { - return _ssoManager.isSsoEnabled() && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); + return _ssoManager.isSsoEnabled() + && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol); } } diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java index 776ab5cad58ff0..254a8cc640d0c5 100644 --- a/datahub-frontend/app/controllers/TrackingController.java +++ b/datahub-frontend/app/controllers/TrackingController.java @@ -1,14 +1,15 @@ package controllers; +import static auth.AuthUtils.ACTOR; + import auth.Authenticator; import client.AuthServiceClient; +import client.KafkaTrackingProducer; import com.fasterxml.jackson.databind.JsonNode; import com.typesafe.config.Config; import javax.annotation.Nonnull; import javax.inject.Inject; import javax.inject.Singleton; - - import org.apache.kafka.clients.producer.ProducerRecord; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -16,57 +17,52 @@ import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; -import client.KafkaTrackingProducer; - -import static auth.AuthUtils.ACTOR; - // TODO: Migrate this to metadata-service. 
@Singleton public class TrackingController extends Controller { - private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); + private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); - private final String _topic; + private final String _topic; - @Inject - KafkaTrackingProducer _producer; + @Inject KafkaTrackingProducer _producer; - @Inject - AuthServiceClient _authClient; + @Inject AuthServiceClient _authClient; - @Inject - public TrackingController(@Nonnull Config config) { - _topic = config.getString("analytics.tracking.topic"); - } + @Inject + public TrackingController(@Nonnull Config config) { + _topic = config.getString("analytics.tracking.topic"); + } - @Security.Authenticated(Authenticator.class) - @Nonnull - public Result track(Http.Request request) throws Exception { - if (!_producer.isEnabled()) { - // If tracking is disabled, simply return a 200. - return status(200); - } + @Security.Authenticated(Authenticator.class) + @Nonnull + public Result track(Http.Request request) throws Exception { + if (!_producer.isEnabled()) { + // If tracking is disabled, simply return a 200. + return status(200); + } - JsonNode event; - try { - event = request.body().asJson(); - } catch (Exception e) { - return badRequest(); - } - final String actor = request.session().data().get(ACTOR); - try { - _logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); - final ProducerRecord record = new ProducerRecord<>( - _topic, - actor, - event.toString()); - _producer.send(record); - _authClient.track(event.toString()); - return ok(); - } catch (Exception e) { - _logger.error(String.format("Failed to emit product analytics event. 
actor: %s, event: %s", actor, event)); - return internalServerError(e.getMessage()); - } + JsonNode event; + try { + event = request.body().asJson(); + } catch (Exception e) { + return badRequest(); + } + final String actor = request.session().data().get(ACTOR); + try { + _logger.debug( + String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); + final ProducerRecord record = + new ProducerRecord<>(_topic, actor, event.toString()); + _producer.send(record); + _authClient.track(event.toString()); + return ok(); + } catch (Exception e) { + _logger.error( + String.format( + "Failed to emit product analytics event. actor: %s, event: %s", actor, event)); + return internalServerError(e.getMessage()); } + } } diff --git a/datahub-frontend/app/security/AuthUtil.java b/datahub-frontend/app/security/AuthUtil.java index 8af90b37a6f31b..55752644ada706 100644 --- a/datahub-frontend/app/security/AuthUtil.java +++ b/datahub-frontend/app/security/AuthUtil.java @@ -8,52 +8,53 @@ import javax.crypto.spec.SecretKeySpec; import org.apache.commons.codec.digest.HmacAlgorithms; - -/** - * Auth Utils - * Adheres to HSEC requirement for creating application tokens - */ +/** Auth Utils Adheres to HSEC requirement for creating application tokens */ public final class AuthUtil { private static final String HMAC_SHA256_ALGORITHM = HmacAlgorithms.HMAC_SHA_256.toString(); private static final String DELIIMITER = ":"; private static final String HEX_CHARS = "0123456789ABCDEF"; - private AuthUtil() { } + private AuthUtil() {} /** * Generate hash string using the secret HMAC Key + * * @param value value to be hashed * @param hmacKey secret HMAC key * @return Hashed string using the secret key * @throws NoSuchAlgorithmException * @throws InvalidKeyException */ - public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlgorithmException, InvalidKeyException { - //Time-stamp at Encryption time + public static String generateHash(String value, byte[] 
hmacKey) + throws NoSuchAlgorithmException, InvalidKeyException { + // Time-stamp at Encryption time long tStamp = System.currentTimeMillis(); String uTValue = new String(); String cValue; String finalEncValue; - //Concatenated Values + // Concatenated Values uTValue = uTValue.concat(value).concat(":").concat(Long.toString(tStamp)); cValue = uTValue; - //Digest - HMAC-SHA256 + // Digest - HMAC-SHA256 SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); mac.init(signingKey); byte[] rawHmac = mac.doFinal(uTValue.getBytes()); String hmacString = getHex(rawHmac); - finalEncValue = Base64.getEncoder().encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); + finalEncValue = + Base64.getEncoder() + .encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes())); return finalEncValue; } /** * Validate the one-way hash string + * * @param hashedValue Hashed value to be validated * @param hmacKey HMAC Key used to create the hash * @param sessionWindow previously defined session window to validate if the hash is expired @@ -62,7 +63,7 @@ public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlg */ public static String verifyHash(String hashedValue, byte[] hmacKey, long sessionWindow) throws GeneralSecurityException { - //Username:Timestamp:SignedHMAC(Username:Timestamp) + // Username:Timestamp:SignedHMAC(Username:Timestamp) String[] decryptedHash = decryptBase64Hash(hashedValue); String username = decryptedHash[0]; String timestamp = decryptedHash[1]; @@ -70,7 +71,7 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session long newTStamp = System.currentTimeMillis(); String newUTValue = username.concat(DELIIMITER).concat(timestamp); - //Digest - HMAC-SHA1 Verify + // Digest - HMAC-SHA1 Verify SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM); Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM); 
mac.init(signingKey); @@ -87,8 +88,10 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session return decryptedHash[0]; } + /** * Decrypt base64 hash + * * @param value base 64 hash string * @return Decrypted base 64 string */ @@ -96,8 +99,10 @@ private static String[] decryptBase64Hash(String value) { String decodedBase64 = new String(Base64.getDecoder().decode(value)); return decodedBase64.split(DELIIMITER); } + /** * Get Hex string from byte array + * * @param raw byte array * @return Hex representation of the byte array */ @@ -114,14 +119,16 @@ private static String getHex(byte[] raw) { return hex.toString(); } + /** * Compares two HMAC byte arrays + * * @param a HMAC byte array 1 * @param b HMAC byte array 2 * @return true if the two HMAC are identical */ private static boolean isEqual(byte[] a, byte[] b) { - if (a == null || b == null || a.length != b.length) { + if (a == null || b == null || a.length != b.length) { return false; } @@ -133,4 +140,4 @@ private static boolean isEqual(byte[] a, byte[] b) { return result == 0; } -} \ No newline at end of file +} diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java index 67bcf7e404335f..f46dc57c232bd2 100644 --- a/datahub-frontend/app/security/AuthenticationManager.java +++ b/datahub-frontend/app/security/AuthenticationManager.java @@ -15,13 +15,12 @@ import org.eclipse.jetty.jaas.PropertyUserStoreManager; import play.Logger; - public class AuthenticationManager { - private AuthenticationManager(boolean verbose) { - } + private AuthenticationManager(boolean verbose) {} - public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception { + public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) + throws Exception { Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty"); JAASLoginService 
jaasLoginService = new JAASLoginService("WHZ-Authentication"); PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager(); @@ -29,10 +28,12 @@ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull Strin jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager)); JAASLoginService.INSTANCE.set(jaasLoginService); try { - LoginContext lc = new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); + LoginContext lc = + new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password)); lc.login(); } catch (LoginException le) { - AuthenticationException authenticationException = new AuthenticationException(le.getMessage()); + AuthenticationException authenticationException = + new AuthenticationException(le.getMessage()); authenticationException.setRootCause(le); throw authenticationException; } @@ -52,7 +53,8 @@ public void handle(@Nonnull Callback[] callbacks) { NameCallback nc = null; PasswordCallback pc = null; for (Callback callback : callbacks) { - Logger.debug("The submitted callback is of type: " + callback.getClass() + " : " + callback); + Logger.debug( + "The submitted callback is of type: " + callback.getClass() + " : " + callback); if (callback instanceof NameCallback) { nc = (NameCallback) callback; nc.setName(this.username); diff --git a/datahub-frontend/app/security/DummyLoginModule.java b/datahub-frontend/app/security/DummyLoginModule.java index 56822f0805be41..c46fa29e1599ad 100644 --- a/datahub-frontend/app/security/DummyLoginModule.java +++ b/datahub-frontend/app/security/DummyLoginModule.java @@ -1,21 +1,22 @@ package security; +import java.util.Map; import javax.security.auth.Subject; import javax.security.auth.callback.CallbackHandler; import javax.security.auth.login.LoginException; import javax.security.auth.spi.LoginModule; -import java.util.Map; - /** - * This LoginModule performs dummy authentication. 
- * Any username and password can work for authentication + * This LoginModule performs dummy authentication. Any username and password can work for + * authentication */ public class DummyLoginModule implements LoginModule { - public void initialize(final Subject subject, final CallbackHandler callbackHandler, - final Map sharedState, final Map options) { - } + public void initialize( + final Subject subject, + final CallbackHandler callbackHandler, + final Map sharedState, + final Map options) {} public boolean login() throws LoginException { return true; @@ -32,5 +33,4 @@ public boolean abort() throws LoginException { public boolean logout() throws LoginException { return true; } - -} \ No newline at end of file +} diff --git a/datahub-frontend/app/utils/ConfigUtil.java b/datahub-frontend/app/utils/ConfigUtil.java index b99a5e123b9eb9..5c80389c96da49 100644 --- a/datahub-frontend/app/utils/ConfigUtil.java +++ b/datahub-frontend/app/utils/ConfigUtil.java @@ -3,18 +3,16 @@ import com.linkedin.util.Configuration; import com.typesafe.config.Config; - public class ConfigUtil { - private ConfigUtil() { - - } + private ConfigUtil() {} // New configurations, provided via application.conf file. 
public static final String METADATA_SERVICE_HOST_CONFIG_PATH = "metadataService.host"; public static final String METADATA_SERVICE_PORT_CONFIG_PATH = "metadataService.port"; public static final String METADATA_SERVICE_USE_SSL_CONFIG_PATH = "metadataService.useSsl"; - public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = "metadataService.sslProtocol"; + public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = + "metadataService.sslProtocol"; // Legacy env-var based config values, for backwards compatibility: public static final String GMS_HOST_ENV_VAR = "DATAHUB_GMS_HOST"; @@ -27,10 +25,14 @@ private ConfigUtil() { public static final String DEFAULT_GMS_PORT = "8080"; public static final String DEFAULT_GMS_USE_SSL = "False"; - public static final String DEFAULT_METADATA_SERVICE_HOST = Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); - public static final Integer DEFAULT_METADATA_SERVICE_PORT = Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); - public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); - public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); + public static final String DEFAULT_METADATA_SERVICE_HOST = + Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost"); + public static final Integer DEFAULT_METADATA_SERVICE_PORT = + Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080")); + public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = + Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False")); + public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = + Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR); public static boolean getBoolean(Config config, String key) { return config.hasPath(key) && 
config.getBoolean(key); diff --git a/datahub-frontend/app/utils/SearchUtil.java b/datahub-frontend/app/utils/SearchUtil.java index 2c52ff5b40156c..803c70a63646a0 100644 --- a/datahub-frontend/app/utils/SearchUtil.java +++ b/datahub-frontend/app/utils/SearchUtil.java @@ -2,29 +2,26 @@ import javax.annotation.Nonnull; - -/** - * Utility functions for Search - */ +/** Utility functions for Search */ public class SearchUtil { - private SearchUtil() { - //utility class - } + private SearchUtil() { + // utility class + } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - * - * @param input - * @return - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + * + * @param input + * @return + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } + return input; + } } diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle index 9a5fb3210a3115..437c72e6394ea6 100644 --- a/datahub-frontend/build.gradle +++ b/datahub-frontend/build.gradle @@ -2,6 +2,7 @@ plugins { id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" id 'scala' id 'com.palantir.docker' + id 'org.gradle.playframework' } apply from: "../gradle/versioning/versioning.gradle" @@ -20,7 +21,6 @@ model { } task myTar(type: Tar) { - extension = "tgz" compression = Compression.GZIP from("${buildDir}/stage") @@ -55,8 +55,6 @@ 
tasks.withType(Checkstyle) { exclude "**/generated/**" } -checkstyleMain.source = "app/" - /* PLAY UPGRADE NOTE @@ -121,3 +119,23 @@ task cleanLocalDockerImages { } } dockerClean.finalizedBy(cleanLocalDockerImages) + +// gradle 8 fixes +tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist' +tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist' +stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +playBinaryDistZip.dependsOn tasks.getByName('createDatahub-frontendStartScripts') +tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist +tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts +tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts +playBinaryDistTar.dependsOn createMainStartScripts +playBinaryDistZip.dependsOn createMainStartScripts +createMainStartScripts.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist' +createPlayBinaryTarDist.dependsOn 'stageMainDist' +createPlayBinaryZipDist.dependsOn 'stageMainDist' diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index dd1ceee411f746..84fb4c02620b8e 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -1,4 +1,3 @@ -apply plugin: "org.gradle.playframework" // Change this to listen on a different port project.ext.httpPort = 9001 @@ -101,4 +100,22 @@ play { test { useJUnitPlatform() + + def playJava17CompatibleJvmArgs = [ + "--add-opens=java.base/java.lang=ALL-UNNAMED", + 
//"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED", + //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED", + //"--add-opens=java.base/java.io=ALL-UNNAMED", + //"--add-opens=java.base/java.net=ALL-UNNAMED", + //"--add-opens=java.base/java.nio=ALL-UNNAMED", + "--add-opens=java.base/java.util=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED", + //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED", + //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED", + //"--add-opens=java.base/sun.security.action=ALL-UNNAMED", + //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED", + //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED", + ] + jvmArgs = playJava17CompatibleJvmArgs } diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java index f27fefdb796691..a5da0951d16328 100644 --- a/datahub-frontend/test/app/ApplicationTest.java +++ b/datahub-frontend/test/app/ApplicationTest.java @@ -1,11 +1,22 @@ package app; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static play.mvc.Http.Status.NOT_FOUND; +import static play.mvc.Http.Status.OK; +import static play.test.Helpers.fakeRequest; +import static play.test.Helpers.route; + import com.nimbusds.jwt.JWT; import com.nimbusds.jwt.JWTClaimsSet; import com.nimbusds.jwt.JWTParser; import controllers.routes; +import java.io.IOException; +import java.net.InetAddress; import java.text.ParseException; import java.util.Date; +import java.util.List; +import java.util.Map; import no.nav.security.mock.oauth2.MockOAuth2Server; import no.nav.security.mock.oauth2.token.DefaultOAuth2TokenCallback; import okhttp3.mockwebserver.MockResponse; @@ -26,22 +37,9 @@ import play.mvc.Http; import play.mvc.Result; import play.test.Helpers; - import play.test.TestBrowser; import play.test.WithBrowser; -import 
java.io.IOException; -import java.net.InetAddress; -import java.util.List; -import java.util.Map; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertTrue; -import static play.mvc.Http.Status.NOT_FOUND; -import static play.mvc.Http.Status.OK; -import static play.test.Helpers.fakeRequest; -import static play.test.Helpers.route; - @TestInstance(TestInstance.Lifecycle.PER_CLASS) @SetEnvironmentVariable(key = "DATAHUB_SECRET", value = "test") @SetEnvironmentVariable(key = "KAFKA_BOOTSTRAP_SERVER", value = "") @@ -56,11 +54,15 @@ public class ApplicationTest extends WithBrowser { @Override protected Application provideApplication() { return new GuiceApplicationBuilder() - .configure("metadataService.port", String.valueOf(gmsServerPort())) - .configure("auth.baseUrl", "http://localhost:" + providePort()) - .configure("auth.oidc.discoveryUri", "http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration") - .in(new Environment(Mode.TEST)).build(); + .configure("metadataService.port", String.valueOf(gmsServerPort())) + .configure("auth.baseUrl", "http://localhost:" + providePort()) + .configure( + "auth.oidc.discoveryUri", + "http://localhost:" + + oauthServerPort() + + "/testIssuer/.well-known/openid-configuration") + .in(new Environment(Mode.TEST)) + .build(); } @Override @@ -90,16 +92,20 @@ public int gmsServerPort() { public void init() throws IOException { _gmsServer = new MockWebServer(); _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER))); - _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); + _gmsServer.enqueue( + new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN))); _gmsServer.start(gmsServerPort()); _oauthServer = new MockOAuth2Server(); _oauthServer.enqueueCallback( - new DefaultOAuth2TokenCallback(ISSUER_ID, "testUser", List.of(), Map.of( - 
"email", "testUser@myCompany.com", - "groups", "myGroup" - ), 600) - ); + new DefaultOAuth2TokenCallback( + ISSUER_ID, + "testUser", + List.of(), + Map.of( + "email", "testUser@myCompany.com", + "groups", "myGroup"), + 600)); _oauthServer.start(InetAddress.getByName("localhost"), oauthServerPort()); // Discovery url to authorization server metadata @@ -147,8 +153,9 @@ public void testIndexNotFound() { @Test public void testOpenIdConfig() { - assertEquals("http://localhost:" + oauthServerPort() - + "/testIssuer/.well-known/openid-configuration", _wellKnownUrl); + assertEquals( + "http://localhost:" + oauthServerPort() + "/testIssuer/.well-known/openid-configuration", + _wellKnownUrl); } @Test @@ -166,8 +173,13 @@ public void testHappyPathOidc() throws ParseException { Map data = (Map) claims.getClaim("data"); assertEquals(TEST_TOKEN, data.get("token")); assertEquals(TEST_USER, data.get("actor")); - // Default expiration is 24h, so should always be less than current time + 1 day since it stamps the time before this executes - assertTrue(claims.getExpirationTime().compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) < 0); + // Default expiration is 24h, so should always be less than current time + 1 day since it stamps + // the time before this executes + assertTrue( + claims + .getExpirationTime() + .compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) + < 0); } @Test diff --git a/datahub-frontend/test/security/DummyLoginModuleTest.java b/datahub-frontend/test/security/DummyLoginModuleTest.java index 6727513d884af1..9bf2b5dd4d11c0 100644 --- a/datahub-frontend/test/security/DummyLoginModuleTest.java +++ b/datahub-frontend/test/security/DummyLoginModuleTest.java @@ -1,14 +1,12 @@ package security; -import com.sun.security.auth.callback.TextCallbackHandler; -import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; +import com.sun.security.auth.callback.TextCallbackHandler; import java.util.HashMap; 
import javax.security.auth.Subject; import javax.security.auth.login.LoginException; - -import static org.junit.jupiter.api.Assertions.*; - +import org.junit.jupiter.api.Test; public class DummyLoginModuleTest { diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java index ed16014b58e595..a27a1462a8a277 100644 --- a/datahub-frontend/test/security/OidcConfigurationTest.java +++ b/datahub-frontend/test/security/OidcConfigurationTest.java @@ -1,5 +1,8 @@ package security; +import static auth.sso.oidc.OidcConfigs.*; +import static org.junit.jupiter.api.Assertions.assertEquals; + import auth.sso.oidc.OidcConfigs; import auth.sso.oidc.OidcProvider; import com.typesafe.config.Config; @@ -19,296 +22,290 @@ import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; - import org.junit.jupiter.api.Test; import org.pac4j.oidc.client.OidcClient; -import static auth.sso.oidc.OidcConfigs.*; -import static org.junit.jupiter.api.Assertions.assertEquals; - - public class OidcConfigurationTest { - private static final com.typesafe.config.Config CONFIG = new Config() { - - private final Map _map = new HashMap<>(); - - @Override - public ConfigObject root() { - return null; - } - - @Override - public ConfigOrigin origin() { - return null; - } - - @Override - public Config withFallback(ConfigMergeable other) { - return null; - } - - @Override - public Config resolve() { - return null; - } - - @Override - public Config resolve(ConfigResolveOptions options) { - return null; - } - - @Override - public boolean isResolved() { - return false; - } - - @Override - public Config resolveWith(Config source) { - return null; - } - - @Override - public Config resolveWith(Config source, ConfigResolveOptions options) { - return null; - } - - @Override - public void checkValid(Config reference, String... 
restrictToPaths) { - - } - - @Override - public boolean hasPath(String path) { - return true; - } - - @Override - public boolean hasPathOrNull(String path) { - return false; - } - - @Override - public boolean isEmpty() { - return false; - } - - @Override - public Set> entrySet() { - return null; - } - - @Override - public boolean getIsNull(String path) { - return false; - } - - @Override - public boolean getBoolean(String path) { - return false; - } - - @Override - public Number getNumber(String path) { - return null; - } - - @Override - public int getInt(String path) { - return 0; - } - - @Override - public long getLong(String path) { - return 0; - } - - @Override - public double getDouble(String path) { - return 0; - } - - @Override - public String getString(String path) { - return (String) _map.getOrDefault(path, "1"); - } - - @Override - public > T getEnum(Class enumClass, String path) { - return null; - } - - @Override - public ConfigObject getObject(String path) { - return null; - } - - @Override - public Config getConfig(String path) { - return null; - } - - @Override - public Object getAnyRef(String path) { - return null; - } - - @Override - public ConfigValue getValue(String path) { - return null; - } - - @Override - public Long getBytes(String path) { - return null; - } - - @Override - public ConfigMemorySize getMemorySize(String path) { - return null; - } - - @Override - public Long getMilliseconds(String path) { - return null; - } - - @Override - public Long getNanoseconds(String path) { - return null; - } - - @Override - public long getDuration(String path, TimeUnit unit) { - return 0; - } - - @Override - public Duration getDuration(String path) { - return null; - } - - @Override - public Period getPeriod(String path) { - return null; - } - - @Override - public TemporalAmount getTemporal(String path) { - return null; - } - - @Override - public ConfigList getList(String path) { - return null; - } - - @Override - public List getBooleanList(String path) { 
- return null; - } - - @Override - public List getNumberList(String path) { - return null; - } - - @Override - public List getIntList(String path) { - return null; - } - - @Override - public List getLongList(String path) { - return null; - } - - @Override - public List getDoubleList(String path) { - return null; - } - - @Override - public List getStringList(String path) { - return null; - } - - @Override - public > List getEnumList(Class enumClass, String path) { - return null; - } - - @Override - public List getObjectList(String path) { - return null; - } - - @Override - public List getConfigList(String path) { - return null; - } - - @Override - public List getAnyRefList(String path) { - return null; - } - - @Override - public List getBytesList(String path) { - return null; - } - - @Override - public List getMemorySizeList(String path) { - return null; - } - - @Override - public List getMillisecondsList(String path) { - return null; - } - - @Override - public List getNanosecondsList(String path) { - return null; - } - - @Override - public List getDurationList(String path, TimeUnit unit) { - return null; - } - - @Override - public List getDurationList(String path) { - return null; - } - - @Override - public Config withOnlyPath(String path) { - return null; - } - - @Override - public Config withoutPath(String path) { - return null; - } - - @Override - public Config atPath(String path) { - return null; - } - - @Override - public Config atKey(String key) { - return null; - } - - @Override - public Config withValue(String path, ConfigValue value) { - _map.put(path, value.unwrapped()); - return this; - } - }; + private static final com.typesafe.config.Config CONFIG = + new Config() { + + private final Map _map = new HashMap<>(); + + @Override + public ConfigObject root() { + return null; + } + + @Override + public ConfigOrigin origin() { + return null; + } + + @Override + public Config withFallback(ConfigMergeable other) { + return null; + } + + @Override + public 
Config resolve() { + return null; + } + + @Override + public Config resolve(ConfigResolveOptions options) { + return null; + } + + @Override + public boolean isResolved() { + return false; + } + + @Override + public Config resolveWith(Config source) { + return null; + } + + @Override + public Config resolveWith(Config source, ConfigResolveOptions options) { + return null; + } + + @Override + public void checkValid(Config reference, String... restrictToPaths) {} + + @Override + public boolean hasPath(String path) { + return true; + } + + @Override + public boolean hasPathOrNull(String path) { + return false; + } + + @Override + public boolean isEmpty() { + return false; + } + + @Override + public Set> entrySet() { + return null; + } + + @Override + public boolean getIsNull(String path) { + return false; + } + + @Override + public boolean getBoolean(String path) { + return false; + } + + @Override + public Number getNumber(String path) { + return null; + } + + @Override + public int getInt(String path) { + return 0; + } + + @Override + public long getLong(String path) { + return 0; + } + + @Override + public double getDouble(String path) { + return 0; + } + + @Override + public String getString(String path) { + return (String) _map.getOrDefault(path, "1"); + } + + @Override + public > T getEnum(Class enumClass, String path) { + return null; + } + + @Override + public ConfigObject getObject(String path) { + return null; + } + + @Override + public Config getConfig(String path) { + return null; + } + + @Override + public Object getAnyRef(String path) { + return null; + } + + @Override + public ConfigValue getValue(String path) { + return null; + } + + @Override + public Long getBytes(String path) { + return null; + } + + @Override + public ConfigMemorySize getMemorySize(String path) { + return null; + } + + @Override + public Long getMilliseconds(String path) { + return null; + } + + @Override + public Long getNanoseconds(String path) { + return null; + } + + @Override 
+ public long getDuration(String path, TimeUnit unit) { + return 0; + } + + @Override + public Duration getDuration(String path) { + return null; + } + + @Override + public Period getPeriod(String path) { + return null; + } + + @Override + public TemporalAmount getTemporal(String path) { + return null; + } + + @Override + public ConfigList getList(String path) { + return null; + } + + @Override + public List getBooleanList(String path) { + return null; + } + + @Override + public List getNumberList(String path) { + return null; + } + + @Override + public List getIntList(String path) { + return null; + } + + @Override + public List getLongList(String path) { + return null; + } + + @Override + public List getDoubleList(String path) { + return null; + } + + @Override + public List getStringList(String path) { + return null; + } + + @Override + public > List getEnumList(Class enumClass, String path) { + return null; + } + + @Override + public List getObjectList(String path) { + return null; + } + + @Override + public List getConfigList(String path) { + return null; + } + + @Override + public List getAnyRefList(String path) { + return null; + } + + @Override + public List getBytesList(String path) { + return null; + } + + @Override + public List getMemorySizeList(String path) { + return null; + } + + @Override + public List getMillisecondsList(String path) { + return null; + } + + @Override + public List getNanosecondsList(String path) { + return null; + } + + @Override + public List getDurationList(String path, TimeUnit unit) { + return null; + } + + @Override + public List getDurationList(String path) { + return null; + } + + @Override + public Config withOnlyPath(String path) { + return null; + } + + @Override + public Config withoutPath(String path) { + return null; + } + + @Override + public Config atPath(String path) { + return null; + } + + @Override + public Config atKey(String key) { + return null; + } + + @Override + public Config withValue(String path, 
ConfigValue value) { + _map.put(path, value.unwrapped()); + return this; + } + }; @Test public void readTimeoutPropagation() { diff --git a/datahub-frontend/test/utils/SearchUtilTest.java b/datahub-frontend/test/utils/SearchUtilTest.java index 428566ae3f4247..6767fa56374692 100644 --- a/datahub-frontend/test/utils/SearchUtilTest.java +++ b/datahub-frontend/test/utils/SearchUtilTest.java @@ -1,17 +1,18 @@ package utils; -import org.junit.jupiter.api.Test; - import static org.junit.jupiter.api.Assertions.assertEquals; +import org.junit.jupiter.api.Test; + public class SearchUtilTest { - @Test - public void testEscapeForwardSlash() { - // escape "/" - assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); - // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to retain the regex behaviour with "*" - assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); - assertEquals("", ""); - assertEquals("foo", "foo"); - } + @Test + public void testEscapeForwardSlash() { + // escape "/" + assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar")); + // "/" is escaped but "*" is not escaped and is treated as regex. 
Since currently we want to + // retain the regex behaviour with "*" + assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*")); + assertEquals("", ""); + assertEquals("foo", "foo"); + } } diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index fba0031351b588..6e8cb939669226 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -1,7 +1,8 @@ plugins { + id 'java' id "io.github.kobylynskyi.graphql.codegen" version "4.1.1" } -apply plugin: 'java' + dependencies { implementation project(':metadata-service:restli-client') diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java index 4488f27c19d808..e45bed33eb0236 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql; -/** - * Constants relating to GraphQL type system & execution. - */ +/** Constants relating to GraphQL type system & execution. 
*/ public class Constants { - private Constants() { }; + private Constants() {} + ; - public static final String URN_FIELD_NAME = "urn"; - public static final String URNS_FIELD_NAME = "urns"; - public static final String GMS_SCHEMA_FILE = "entity.graphql"; - public static final String SEARCH_SCHEMA_FILE = "search.graphql"; - public static final String APP_SCHEMA_FILE = "app.graphql"; - public static final String AUTH_SCHEMA_FILE = "auth.graphql"; - public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; - public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; - public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; - public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; - public static final String TESTS_SCHEMA_FILE = "tests.graphql"; - public static final String STEPS_SCHEMA_FILE = "step.graphql"; - public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; - public static final String BROWSE_PATH_DELIMITER = "/"; - public static final String BROWSE_PATH_V2_DELIMITER = "␟"; - public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; - - public static final String ENTITY_FILTER_NAME = "_entityType"; + public static final String URN_FIELD_NAME = "urn"; + public static final String URNS_FIELD_NAME = "urns"; + public static final String GMS_SCHEMA_FILE = "entity.graphql"; + public static final String SEARCH_SCHEMA_FILE = "search.graphql"; + public static final String APP_SCHEMA_FILE = "app.graphql"; + public static final String AUTH_SCHEMA_FILE = "auth.graphql"; + public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql"; + public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql"; + public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql"; + public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql"; + public static final String TESTS_SCHEMA_FILE = "tests.graphql"; + public static final String STEPS_SCHEMA_FILE 
= "step.graphql"; + public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql"; + public static final String BROWSE_PATH_DELIMITER = "/"; + public static final String BROWSE_PATH_V2_DELIMITER = "␟"; + public static final String VERSION_STAMP_FIELD_NAME = "versionStamp"; + public static final String ENTITY_FILTER_NAME = "_entityType"; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 9ea8126a07ab2b..f0cb56b1a99ce4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; +import static graphql.scalars.ExtendedScalars.*; + import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; @@ -68,7 +72,6 @@ import com.linkedin.datahub.graphql.generated.ListQueriesResult; import com.linkedin.datahub.graphql.generated.ListTestsResult; import com.linkedin.datahub.graphql.generated.ListViewsResult; -import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.MLFeature; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; import com.linkedin.datahub.graphql.generated.MLFeatureTable; @@ -78,6 +81,7 @@ import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; +import com.linkedin.datahub.graphql.generated.MatchedField; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.Owner; import 
com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; @@ -284,7 +288,6 @@ import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType; import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper; import com.linkedin.datahub.graphql.types.domain.DomainType; -import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType; import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType; @@ -297,6 +300,7 @@ import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType; import com.linkedin.datahub.graphql.types.query.QueryType; import com.linkedin.datahub.graphql.types.role.DataHubRoleType; +import com.linkedin.datahub.graphql.types.rolemetadata.RoleType; import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType; import com.linkedin.datahub.graphql.types.tag.TagType; import com.linkedin.datahub.graphql.types.test.TestType; @@ -352,205 +356,191 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderOptions; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; -import static graphql.scalars.ExtendedScalars.*; - - /** - * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph. + * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS + * graph. 
*/ @Slf4j @Getter public class GmsGraphQLEngine { - private final EntityClient entityClient; - private final SystemEntityClient systemEntityClient; - private final GraphClient graphClient; - private final UsageClient usageClient; - private final SiblingGraphService siblingGraphService; - - private final EntityService entityService; - private final AnalyticsService analyticsService; - private final RecommendationsService recommendationsService; - private final EntityRegistry entityRegistry; - private final StatefulTokenService statefulTokenService; - private final SecretService secretService; - private final GitVersion gitVersion; - private final boolean supportsImpactAnalysis; - private final TimeseriesAspectService timeseriesAspectService; - private final TimelineService timelineService; - private final NativeUserService nativeUserService; - private final GroupService groupService; - private final RoleService roleService; - private final InviteTokenService inviteTokenService; - private final PostService postService; - private final SettingsService settingsService; - private final ViewService viewService; - private final OwnershipTypeService ownershipTypeService; - private final LineageService lineageService; - private final QueryService queryService; - private final DataProductService dataProductService; - - private final FeatureFlags featureFlags; - - private final IngestionConfiguration ingestionConfiguration; - private final AuthenticationConfiguration authenticationConfiguration; - private final AuthorizationConfiguration authorizationConfiguration; - private final VisualConfiguration visualConfiguration; - private final TelemetryConfiguration telemetryConfiguration; - private final TestsConfiguration testsConfiguration; - private final DataHubConfiguration datahubConfiguration; - private final ViewsConfiguration viewsConfiguration; - - private final DatasetType datasetType; - - private final RoleType roleType; - - private final CorpUserType corpUserType; - 
private final CorpGroupType corpGroupType; - private final ChartType chartType; - private final DashboardType dashboardType; - private final DataPlatformType dataPlatformType; - private final TagType tagType; - private final MLModelType mlModelType; - private final MLModelGroupType mlModelGroupType; - private final MLFeatureType mlFeatureType; - private final MLFeatureTableType mlFeatureTableType; - private final MLPrimaryKeyType mlPrimaryKeyType; - private final DataFlowType dataFlowType; - private final DataJobType dataJobType; - private final GlossaryTermType glossaryTermType; - private final GlossaryNodeType glossaryNodeType; - private final AspectType aspectType; - private final ContainerType containerType; - private final DomainType domainType; - private final NotebookType notebookType; - private final AssertionType assertionType; - private final VersionedDatasetType versionedDatasetType; - private final DataPlatformInstanceType dataPlatformInstanceType; - private final AccessTokenMetadataType accessTokenMetadataType; - private final TestType testType; - private final DataHubPolicyType dataHubPolicyType; - private final DataHubRoleType dataHubRoleType; - private final SchemaFieldType schemaFieldType; - private final DataHubViewType dataHubViewType; - private final QueryType queryType; - private final DataProductType dataProductType; - private final OwnershipType ownershipType; - - /** - * A list of GraphQL Plugins that extend the core engine - */ - private final List graphQLPlugins; - - /** - * Configures the graph objects that can be fetched primary key. - */ - public final List> entityTypes; - - /** - * Configures all graph objects - */ - public final List> loadableTypes; - - /** - * Configures the graph objects for owner - */ - public final List> ownerTypes; - - /** - * Configures the graph objects that can be searched. - */ - public final List> searchableTypes; - - /** - * Configures the graph objects that can be browsed. 
- */ - public final List> browsableTypes; - - public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { - - this.graphQLPlugins = List.of( + private final EntityClient entityClient; + private final SystemEntityClient systemEntityClient; + private final GraphClient graphClient; + private final UsageClient usageClient; + private final SiblingGraphService siblingGraphService; + + private final EntityService entityService; + private final AnalyticsService analyticsService; + private final RecommendationsService recommendationsService; + private final EntityRegistry entityRegistry; + private final StatefulTokenService statefulTokenService; + private final SecretService secretService; + private final GitVersion gitVersion; + private final boolean supportsImpactAnalysis; + private final TimeseriesAspectService timeseriesAspectService; + private final TimelineService timelineService; + private final NativeUserService nativeUserService; + private final GroupService groupService; + private final RoleService roleService; + private final InviteTokenService inviteTokenService; + private final PostService postService; + private final SettingsService settingsService; + private final ViewService viewService; + private final OwnershipTypeService ownershipTypeService; + private final LineageService lineageService; + private final QueryService queryService; + private final DataProductService dataProductService; + + private final FeatureFlags featureFlags; + + private final IngestionConfiguration ingestionConfiguration; + private final AuthenticationConfiguration authenticationConfiguration; + private final AuthorizationConfiguration authorizationConfiguration; + private final VisualConfiguration visualConfiguration; + private final TelemetryConfiguration telemetryConfiguration; + private final TestsConfiguration testsConfiguration; + private final DataHubConfiguration datahubConfiguration; + private final ViewsConfiguration viewsConfiguration; + + private final DatasetType 
datasetType; + + private final RoleType roleType; + + private final CorpUserType corpUserType; + private final CorpGroupType corpGroupType; + private final ChartType chartType; + private final DashboardType dashboardType; + private final DataPlatformType dataPlatformType; + private final TagType tagType; + private final MLModelType mlModelType; + private final MLModelGroupType mlModelGroupType; + private final MLFeatureType mlFeatureType; + private final MLFeatureTableType mlFeatureTableType; + private final MLPrimaryKeyType mlPrimaryKeyType; + private final DataFlowType dataFlowType; + private final DataJobType dataJobType; + private final GlossaryTermType glossaryTermType; + private final GlossaryNodeType glossaryNodeType; + private final AspectType aspectType; + private final ContainerType containerType; + private final DomainType domainType; + private final NotebookType notebookType; + private final AssertionType assertionType; + private final VersionedDatasetType versionedDatasetType; + private final DataPlatformInstanceType dataPlatformInstanceType; + private final AccessTokenMetadataType accessTokenMetadataType; + private final TestType testType; + private final DataHubPolicyType dataHubPolicyType; + private final DataHubRoleType dataHubRoleType; + private final SchemaFieldType schemaFieldType; + private final DataHubViewType dataHubViewType; + private final QueryType queryType; + private final DataProductType dataProductType; + private final OwnershipType ownershipType; + + /** A list of GraphQL Plugins that extend the core engine */ + private final List graphQLPlugins; + + /** Configures the graph objects that can be fetched primary key. */ + public final List> entityTypes; + + /** Configures all graph objects */ + public final List> loadableTypes; + + /** Configures the graph objects for owner */ + public final List> ownerTypes; + + /** Configures the graph objects that can be searched. 
*/ + public final List> searchableTypes; + + /** Configures the graph objects that can be browsed. */ + public final List> browsableTypes; + + public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { + + this.graphQLPlugins = + List.of( // Add new plugins here - ); - - this.graphQLPlugins.forEach(plugin -> plugin.init(args)); - - this.entityClient = args.entityClient; - this.systemEntityClient = args.systemEntityClient; - this.graphClient = args.graphClient; - this.usageClient = args.usageClient; - this.siblingGraphService = args.siblingGraphService; - - this.analyticsService = args.analyticsService; - this.entityService = args.entityService; - this.recommendationsService = args.recommendationsService; - this.statefulTokenService = args.statefulTokenService; - this.secretService = args.secretService; - this.entityRegistry = args.entityRegistry; - this.gitVersion = args.gitVersion; - this.supportsImpactAnalysis = args.supportsImpactAnalysis; - this.timeseriesAspectService = args.timeseriesAspectService; - this.timelineService = args.timelineService; - this.nativeUserService = args.nativeUserService; - this.groupService = args.groupService; - this.roleService = args.roleService; - this.inviteTokenService = args.inviteTokenService; - this.postService = args.postService; - this.viewService = args.viewService; - this.ownershipTypeService = args.ownershipTypeService; - this.settingsService = args.settingsService; - this.lineageService = args.lineageService; - this.queryService = args.queryService; - this.dataProductService = args.dataProductService; - - this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); - this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); - this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration); - this.visualConfiguration = args.visualConfiguration; - this.telemetryConfiguration = args.telemetryConfiguration; - this.testsConfiguration = 
args.testsConfiguration; - this.datahubConfiguration = args.datahubConfiguration; - this.viewsConfiguration = args.viewsConfiguration; - this.featureFlags = args.featureFlags; - - this.datasetType = new DatasetType(entityClient); - this.roleType = new RoleType(entityClient); - this.corpUserType = new CorpUserType(entityClient, featureFlags); - this.corpGroupType = new CorpGroupType(entityClient); - this.chartType = new ChartType(entityClient); - this.dashboardType = new DashboardType(entityClient); - this.dataPlatformType = new DataPlatformType(entityClient); - this.tagType = new TagType(entityClient); - this.mlModelType = new MLModelType(entityClient); - this.mlModelGroupType = new MLModelGroupType(entityClient); - this.mlFeatureType = new MLFeatureType(entityClient); - this.mlFeatureTableType = new MLFeatureTableType(entityClient); - this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); - this.dataFlowType = new DataFlowType(entityClient); - this.dataJobType = new DataJobType(entityClient); - this.glossaryTermType = new GlossaryTermType(entityClient); - this.glossaryNodeType = new GlossaryNodeType(entityClient); - this.aspectType = new AspectType(entityClient); - this.containerType = new ContainerType(entityClient); - this.domainType = new DomainType(entityClient); - this.notebookType = new NotebookType(entityClient); - this.assertionType = new AssertionType(entityClient); - this.versionedDatasetType = new VersionedDatasetType(entityClient); - this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); - this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); - this.testType = new TestType(entityClient); - this.dataHubPolicyType = new DataHubPolicyType(entityClient); - this.dataHubRoleType = new DataHubRoleType(entityClient); - this.schemaFieldType = new SchemaFieldType(); - this.dataHubViewType = new DataHubViewType(entityClient); - this.queryType = new QueryType(entityClient); - this.dataProductType = new 
DataProductType(entityClient); - this.ownershipType = new OwnershipType(entityClient); - - // Init Lists - this.entityTypes = ImmutableList.of( + ); + + this.graphQLPlugins.forEach(plugin -> plugin.init(args)); + + this.entityClient = args.entityClient; + this.systemEntityClient = args.systemEntityClient; + this.graphClient = args.graphClient; + this.usageClient = args.usageClient; + this.siblingGraphService = args.siblingGraphService; + + this.analyticsService = args.analyticsService; + this.entityService = args.entityService; + this.recommendationsService = args.recommendationsService; + this.statefulTokenService = args.statefulTokenService; + this.secretService = args.secretService; + this.entityRegistry = args.entityRegistry; + this.gitVersion = args.gitVersion; + this.supportsImpactAnalysis = args.supportsImpactAnalysis; + this.timeseriesAspectService = args.timeseriesAspectService; + this.timelineService = args.timelineService; + this.nativeUserService = args.nativeUserService; + this.groupService = args.groupService; + this.roleService = args.roleService; + this.inviteTokenService = args.inviteTokenService; + this.postService = args.postService; + this.viewService = args.viewService; + this.ownershipTypeService = args.ownershipTypeService; + this.settingsService = args.settingsService; + this.lineageService = args.lineageService; + this.queryService = args.queryService; + this.dataProductService = args.dataProductService; + + this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration); + this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration); + this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration); + this.visualConfiguration = args.visualConfiguration; + this.telemetryConfiguration = args.telemetryConfiguration; + this.testsConfiguration = args.testsConfiguration; + this.datahubConfiguration = args.datahubConfiguration; + this.viewsConfiguration = 
args.viewsConfiguration; + this.featureFlags = args.featureFlags; + + this.datasetType = new DatasetType(entityClient); + this.roleType = new RoleType(entityClient); + this.corpUserType = new CorpUserType(entityClient, featureFlags); + this.corpGroupType = new CorpGroupType(entityClient); + this.chartType = new ChartType(entityClient); + this.dashboardType = new DashboardType(entityClient); + this.dataPlatformType = new DataPlatformType(entityClient); + this.tagType = new TagType(entityClient); + this.mlModelType = new MLModelType(entityClient); + this.mlModelGroupType = new MLModelGroupType(entityClient); + this.mlFeatureType = new MLFeatureType(entityClient); + this.mlFeatureTableType = new MLFeatureTableType(entityClient); + this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient); + this.dataFlowType = new DataFlowType(entityClient); + this.dataJobType = new DataJobType(entityClient); + this.glossaryTermType = new GlossaryTermType(entityClient); + this.glossaryNodeType = new GlossaryNodeType(entityClient); + this.aspectType = new AspectType(entityClient); + this.containerType = new ContainerType(entityClient); + this.domainType = new DomainType(entityClient); + this.notebookType = new NotebookType(entityClient); + this.assertionType = new AssertionType(entityClient); + this.versionedDatasetType = new VersionedDatasetType(entityClient); + this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient); + this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient); + this.testType = new TestType(entityClient); + this.dataHubPolicyType = new DataHubPolicyType(entityClient); + this.dataHubRoleType = new DataHubRoleType(entityClient); + this.schemaFieldType = new SchemaFieldType(); + this.dataHubViewType = new DataHubViewType(entityClient); + this.queryType = new QueryType(entityClient); + this.dataProductType = new DataProductType(entityClient); + this.ownershipType = new OwnershipType(entityClient); + + // Init Lists + this.entityTypes = 
+ ImmutableList.of( datasetType, roleType, corpUserType, @@ -582,1262 +572,1867 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) { dataHubViewType, queryType, dataProductType, - ownershipType - ); - this.loadableTypes = new ArrayList<>(entityTypes); - // Extend loadable types with types from the plugins - // This allows us to offer search and browse capabilities out of the box for those types - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - Collection> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - this.loadableTypes.addAll(pluginLoadableTypes); - } - } - this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); - this.searchableTypes = loadableTypes.stream() + ownershipType); + this.loadableTypes = new ArrayList<>(entityTypes); + // Extend loadable types with types from the plugins + // This allows us to offer search and browse capabilities out of the box for those types + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + Collection> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + this.loadableTypes.addAll(pluginLoadableTypes); + } + } + this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType); + this.searchableTypes = + loadableTypes.stream() .filter(type -> (type instanceof SearchableEntityType)) .map(type -> (SearchableEntityType) type) .collect(Collectors.toList()); - this.browsableTypes = loadableTypes.stream() + this.browsableTypes = + loadableTypes.stream() .filter(type -> (type instanceof BrowsableEntityType)) .map(type -> (BrowsableEntityType) type) .collect(Collectors.toList()); - } + } - /** - * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from - * a {@link LoadableType}. 
- */ - public Map>> loaderSuppliers(final Collection> loadableTypes) { - return loadableTypes - .stream() - .collect(Collectors.toMap( + /** + * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link + * LoadableType}. + */ + public Map>> loaderSuppliers( + final Collection> loadableTypes) { + return loadableTypes.stream() + .collect( + Collectors.toMap( LoadableType::name, - (graphType) -> (context) -> createDataLoader(graphType, context) - )); - } + (graphType) -> (context) -> createDataLoader(graphType, context))); + } - /** - * Final call to wire up any extra resolvers the plugin might want to add on - * @param builder - */ - private void configurePluginResolvers(final RuntimeWiring.Builder builder) { - this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); - } - - - public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { - configureQueryResolvers(builder); - configureMutationResolvers(builder); - configureGenericEntityResolvers(builder); - configureDatasetResolvers(builder); - configureCorpUserResolvers(builder); - configureCorpGroupResolvers(builder); - configureDashboardResolvers(builder); - configureNotebookResolvers(builder); - configureChartResolvers(builder); - configureTypeResolvers(builder); - configureTypeExtensions(builder); - configureTagAssociationResolver(builder); - configureGlossaryTermAssociationResolver(builder); - configureDataJobResolvers(builder); - configureDataFlowResolvers(builder); - configureMLFeatureTableResolvers(builder); - configureGlossaryRelationshipResolvers(builder); - configureIngestionSourceResolvers(builder); - configureAnalyticsResolvers(builder); - configureContainerResolvers(builder); - configureDataPlatformInstanceResolvers(builder); - configureGlossaryTermResolvers(builder); - configureOrganisationRoleResolvers(builder); - configureGlossaryNodeResolvers(builder); - configureDomainResolvers(builder); - 
configureDataProductResolvers(builder); - configureAssertionResolvers(builder); - configurePolicyResolvers(builder); - configureDataProcessInstanceResolvers(builder); - configureVersionedDatasetResolvers(builder); - configureAccessAccessTokenMetadataResolvers(builder); - configureTestResultResolvers(builder); - configureRoleResolvers(builder); - configureSchemaFieldResolvers(builder); - configureEntityPathResolvers(builder); - configureViewResolvers(builder); - configureQueryEntityResolvers(builder); - configureOwnershipTypeResolver(builder); - configurePluginResolvers(builder); - } - - private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { - builder.type("Role", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("RoleAssociation", typeWiring -> typeWiring - .dataFetcher("role", - new LoadableTypeResolver<>(roleType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleAssociation) - env.getSource()).getRole().getUrn())) - ); - builder.type("RoleUser", typeWiring -> typeWiring - .dataFetcher("user", - new LoadableTypeResolver<>(corpUserType, - (env) -> ((com.linkedin.datahub.graphql.generated.RoleUser) - env.getSource()).getUser().getUrn())) - ); + /** + * Final call to wire up any extra resolvers the plugin might want to add on + * + * @param builder + */ + private void configurePluginResolvers(final RuntimeWiring.Builder builder) { + this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this)); + } + + public void configureRuntimeWiring(final RuntimeWiring.Builder builder) { + configureQueryResolvers(builder); + configureMutationResolvers(builder); + configureGenericEntityResolvers(builder); + configureDatasetResolvers(builder); + configureCorpUserResolvers(builder); + configureCorpGroupResolvers(builder); + configureDashboardResolvers(builder); + configureNotebookResolvers(builder); + configureChartResolvers(builder); + 
configureTypeResolvers(builder); + configureTypeExtensions(builder); + configureTagAssociationResolver(builder); + configureGlossaryTermAssociationResolver(builder); + configureDataJobResolvers(builder); + configureDataFlowResolvers(builder); + configureMLFeatureTableResolvers(builder); + configureGlossaryRelationshipResolvers(builder); + configureIngestionSourceResolvers(builder); + configureAnalyticsResolvers(builder); + configureContainerResolvers(builder); + configureDataPlatformInstanceResolvers(builder); + configureGlossaryTermResolvers(builder); + configureOrganisationRoleResolvers(builder); + configureGlossaryNodeResolvers(builder); + configureDomainResolvers(builder); + configureDataProductResolvers(builder); + configureAssertionResolvers(builder); + configurePolicyResolvers(builder); + configureDataProcessInstanceResolvers(builder); + configureVersionedDatasetResolvers(builder); + configureAccessAccessTokenMetadataResolvers(builder); + configureTestResultResolvers(builder); + configureRoleResolvers(builder); + configureSchemaFieldResolvers(builder); + configureEntityPathResolvers(builder); + configureViewResolvers(builder); + configureQueryEntityResolvers(builder); + configureOwnershipTypeResolver(builder); + configurePluginResolvers(builder); + } + + private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) { + builder.type( + "Role", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "RoleAssociation", + typeWiring -> + typeWiring.dataFetcher( + "role", + new LoadableTypeResolver<>( + roleType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleAssociation) env.getSource()) + .getRole() + .getUrn()))); + builder.type( + "RoleUser", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((com.linkedin.datahub.graphql.generated.RoleUser) env.getSource()) + .getUser() + .getUrn()))); + } + + 
public GraphQLEngine.Builder builder() { + final GraphQLEngine.Builder builder = GraphQLEngine.builder(); + builder + .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) + .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) + .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) + .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) + .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); + + for (GmsGraphQLPlugin plugin : this.graphQLPlugins) { + List pluginSchemaFiles = plugin.getSchemaFiles(); + if (pluginSchemaFiles != null) { + pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema))); + } + Collection> pluginLoadableTypes = plugin.getLoadableTypes(); + if (pluginLoadableTypes != null) { + pluginLoadableTypes.forEach( + loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); + } } - - public GraphQLEngine.Builder builder() { - final GraphQLEngine.Builder builder = GraphQLEngine.builder(); - builder - .addSchema(fileBasedSchema(GMS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(APP_SCHEMA_FILE)) - .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE)) - .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE)) - .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE)) - .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE)); - - for (GmsGraphQLPlugin plugin: this.graphQLPlugins) { - List pluginSchemaFiles = plugin.getSchemaFiles(); - if (pluginSchemaFiles != null) { - pluginSchemaFiles.forEach(schema -> 
builder.addSchema(fileBasedSchema(schema))); - } - Collection> pluginLoadableTypes = plugin.getLoadableTypes(); - if (pluginLoadableTypes != null) { - pluginLoadableTypes.forEach(loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes))); - } - } - builder - .addDataLoaders(loaderSuppliers(loadableTypes)) - .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) - .configureRuntimeWiring(this::configureRuntimeWiring); - return builder; + builder + .addDataLoaders(loaderSuppliers(loadableTypes)) + .addDataLoader("Aspect", context -> createDataLoader(aspectType, context)) + .configureRuntimeWiring(this::configureRuntimeWiring); + return builder; + } + + public static String fileBasedSchema(String fileName) { + String schema; + try { + InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); + schema = IOUtils.toString(is, StandardCharsets.UTF_8); + is.close(); + } catch (IOException e) { + throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); } - - public static String fileBasedSchema(String fileName) { - String schema; - try { - InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName); - schema = IOUtils.toString(is, StandardCharsets.UTF_8); - is.close(); - } catch (IOException e) { - throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e); - } - return schema; + return schema; + } + + private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { + final boolean isAnalyticsEnabled = analyticsService != null; + builder + .type( + "Query", + typeWiring -> + typeWiring.dataFetcher( + "isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) + .type( + "AnalyticsChart", + typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); + if (isAnalyticsEnabled) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + 
"getAnalyticsCharts", new GetChartsResolver(analyticsService, entityClient)) + .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) + .dataFetcher( + "getMetadataAnalyticsCharts", + new GetMetadataAnalyticsResolver(entityClient))); } + } - private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) { - final boolean isAnalyticsEnabled = analyticsService != null; - builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled))) - .type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver())); - if (isAnalyticsEnabled) { - builder.type("Query", typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts", - new GetChartsResolver(analyticsService, entityClient)) - .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService)) - .dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient))); - } - } - - private void configureContainerResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Container", typeWiring -> typeWiring + private void configureContainerResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Container", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("entities", new ContainerEntitiesResolver(entityClient)) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, (env) -> ((Container) env.getSource()).getPlatform().getUrn())) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Container container = env.getSource(); - return container.getContainer() != null ? 
container.getContainer().getUrn() : null; - }) - ) + final Container container = env.getSource(); + return container.getContainer() != null + ? container.getContainer().getUrn() + : null; + })) .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Container container = env.getSource(); - return container.getDataPlatformInstance() != null ? container.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataPlatformInstance", typeWiring -> typeWiring - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn())) - ); - } - - private void configureQueryResolvers(final RuntimeWiring.Builder builder) { - builder.type("Query", typeWiring -> typeWiring - .dataFetcher("appConfig", - new AppConfigResolver(gitVersion, analyticsService != null, - this.ingestionConfiguration, - this.authenticationConfiguration, - this.authorizationConfiguration, - this.supportsImpactAnalysis, - this.visualConfiguration, - this.telemetryConfiguration, - this.testsConfiguration, - this.datahubConfiguration, - this.viewsConfiguration, - this.featureFlags - )) - .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) - .dataFetcher("search", new SearchResolver(this.entityClient)) - .dataFetcher("searchAcrossEntities", new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) - .dataFetcher("scrollAcrossLineage", new 
ScrollAcrossLineageResolver(this.entityClient)) - .dataFetcher("aggregateAcrossEntities", new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) - .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) - .dataFetcher("autoCompleteForMultiple", new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) - .dataFetcher("browse", new BrowseResolver(browsableTypes)) - .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) - .dataFetcher("dataset", getResolver(datasetType)) - .dataFetcher("role", getResolver(roleType)) - .dataFetcher("versionedDataset", getResolver(versionedDatasetType, - (env) -> new VersionedUrn().setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) - .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) - .dataFetcher("notebook", getResolver(notebookType)) - .dataFetcher("corpUser", getResolver(corpUserType)) - .dataFetcher("corpGroup", getResolver(corpGroupType)) - .dataFetcher("dashboard", getResolver(dashboardType)) - .dataFetcher("chart", getResolver(chartType)) - .dataFetcher("tag", getResolver(tagType)) - .dataFetcher("dataFlow", getResolver(dataFlowType)) - .dataFetcher("dataJob", getResolver(dataJobType)) - .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) - .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) - .dataFetcher("domain", getResolver((domainType))) - .dataFetcher("dataPlatform", getResolver(dataPlatformType)) - .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) - .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) - .dataFetcher("mlFeature", getResolver(mlFeatureType)) - .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) - .dataFetcher("mlModel", getResolver(mlModelType)) - .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) - .dataFetcher("assertion", getResolver(assertionType)) - .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) - 
.dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) - .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) - .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) - .dataFetcher("listRecommendations", new ListRecommendationsResolver(recommendationsService)) - .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) - .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) - .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) - .dataFetcher("container", getResolver(containerType)) - .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) - .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) - .dataFetcher("getSecretValues", new GetSecretValuesResolver(this.entityClient, this.secretService)) - .dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) - .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) - .dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) - .dataFetcher("getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) - .dataFetcher("test", getResolver(testType)) - .dataFetcher("listTests", new ListTestsResolver(entityClient)) - .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) - .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) - .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) - .dataFetcher("entity", getEntityResolver()) - .dataFetcher("entities", getEntitiesResolver()) - .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) - .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("listPosts", new 
ListPostsResolver(this.entityClient)) - .dataFetcher("batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) - .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) - .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) - .dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) - .dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService)) - .dataFetcher("dataProduct", getResolver(dataProductType)) - .dataFetcher("listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) - .dataFetcher("browseV2", new BrowseV2Resolver(this.entityClient, this.viewService)) - ); - } - - private DataFetcher getEntitiesResolver() { - return new BatchGetEntitiesResolver(entityTypes, - (env) -> { - List urns = env.getArgument(URNS_FIELD_NAME); - return urns.stream().map((urn) -> { + final Container container = env.getSource(); + return container.getDataPlatformInstance() != null + ? 
container.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataPlatformInstance", + typeWiring -> + typeWiring.dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn()))); + } + + private void configureQueryResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Query", + typeWiring -> + typeWiring + .dataFetcher( + "appConfig", + new AppConfigResolver( + gitVersion, + analyticsService != null, + this.ingestionConfiguration, + this.authenticationConfiguration, + this.authorizationConfiguration, + this.supportsImpactAnalysis, + this.visualConfiguration, + this.telemetryConfiguration, + this.testsConfiguration, + this.datahubConfiguration, + this.viewsConfiguration, + this.featureFlags)) + .dataFetcher("me", new MeResolver(this.entityClient, featureFlags)) + .dataFetcher("search", new SearchResolver(this.entityClient)) + .dataFetcher( + "searchAcrossEntities", + new SearchAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "scrollAcrossEntities", + new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher( + "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient)) + .dataFetcher( + "aggregateAcrossEntities", + new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService)) + .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes)) + .dataFetcher( + "autoCompleteForMultiple", + new AutoCompleteForMultipleResolver(searchableTypes, this.viewService)) + .dataFetcher("browse", new BrowseResolver(browsableTypes)) + .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes)) + .dataFetcher("dataset", getResolver(datasetType)) + .dataFetcher("role", 
getResolver(roleType)) + .dataFetcher( + "versionedDataset", + getResolver( + versionedDatasetType, + (env) -> + new VersionedUrn() + .setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME))) + .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME)))) + .dataFetcher("notebook", getResolver(notebookType)) + .dataFetcher("corpUser", getResolver(corpUserType)) + .dataFetcher("corpGroup", getResolver(corpGroupType)) + .dataFetcher("dashboard", getResolver(dashboardType)) + .dataFetcher("chart", getResolver(chartType)) + .dataFetcher("tag", getResolver(tagType)) + .dataFetcher("dataFlow", getResolver(dataFlowType)) + .dataFetcher("dataJob", getResolver(dataJobType)) + .dataFetcher("glossaryTerm", getResolver(glossaryTermType)) + .dataFetcher("glossaryNode", getResolver(glossaryNodeType)) + .dataFetcher("domain", getResolver((domainType))) + .dataFetcher("dataPlatform", getResolver(dataPlatformType)) + .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType)) + .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType)) + .dataFetcher("mlFeature", getResolver(mlFeatureType)) + .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType)) + .dataFetcher("mlModel", getResolver(mlModelType)) + .dataFetcher("mlModelGroup", getResolver(mlModelGroupType)) + .dataFetcher("assertion", getResolver(assertionType)) + .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient)) + .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver()) + .dataFetcher("listUsers", new ListUsersResolver(this.entityClient)) + .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient)) + .dataFetcher( + "listRecommendations", new ListRecommendationsResolver(recommendationsService)) + .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient)) + .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService)) + .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient)) + 
.dataFetcher("container", getResolver(containerType)) + .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient)) + .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient)) + .dataFetcher( + "getSecretValues", + new GetSecretValuesResolver(this.entityClient, this.secretService)) + .dataFetcher( + "listIngestionSources", new ListIngestionSourcesResolver(this.entityClient)) + .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService)) + .dataFetcher( + "getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService)) + .dataFetcher("test", getResolver(testType)) + .dataFetcher("listTests", new ListTestsResolver(entityClient)) + .dataFetcher( + "getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient)) + .dataFetcher( + "getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient)) + .dataFetcher("entityExists", new EntityExistsResolver(this.entityService)) + .dataFetcher("entity", getEntityResolver()) + .dataFetcher("entities", getEntitiesResolver()) + .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) + .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) + .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) + .dataFetcher( + "batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient)) + .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient)) + .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient)) + .dataFetcher( + "globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient)) + .dataFetcher( + "getQuickFilters", + new GetQuickFiltersResolver(this.entityClient, this.viewService)) + 
.dataFetcher("dataProduct", getResolver(dataProductType)) + .dataFetcher( + "listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher( + "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient)) + .dataFetcher( + "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))); + } + + private DataFetcher getEntitiesResolver() { + return new BatchGetEntitiesResolver( + entityTypes, + (env) -> { + List urns = env.getArgument(URNS_FIELD_NAME); + return urns.stream() + .map( + (urn) -> { try { - Urn entityUrn = Urn.createFromString(urn); - return UrnToEntityMapper.map(entityUrn); + Urn entityUrn = Urn.createFromString(urn); + return UrnToEntityMapper.map(entityUrn); } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); + throw new RuntimeException("Failed to get entity", e); } - }).collect(Collectors.toList()); - }); - } + }) + .collect(Collectors.toList()); + }); + } + + private DataFetcher getEntityResolver() { + return new EntityTypeResolver( + entityTypes, + (env) -> { + try { + Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); + return UrnToEntityMapper.map(urn); + } catch (Exception e) { + throw new RuntimeException("Failed to get entity", e); + } + }); + } + + private DataFetcher getResolver(LoadableType loadableType) { + return getResolver(loadableType, this::getUrnField); + } + + private DataFetcher getResolver( + LoadableType loadableType, Function keyProvider) { + return new LoadableTypeResolver<>(loadableType, keyProvider); + } + + private String getUrnField(DataFetchingEnvironment env) { + return env.getArgument(URN_FIELD_NAME); + } + + private void configureMutationResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Mutation", + typeWiring -> + typeWiring + .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) + .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) + .dataFetcher( + "createTag", new 
CreateTagResolver(this.entityClient, this.entityService)) + .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) + .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) + .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) + .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) + .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) + .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) + .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) + .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) + .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) + .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) + .dataFetcher("addTag", new AddTagResolver(entityService)) + .dataFetcher("addTags", new AddTagsResolver(entityService)) + .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) + .dataFetcher("removeTag", new RemoveTagResolver(entityService)) + .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) + .dataFetcher("addTerm", new AddTermResolver(entityService)) + .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) + .dataFetcher("addTerms", new AddTermsResolver(entityService)) + .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) + .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) + .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) + .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) + .dataFetcher( + "updateDescription", + new UpdateDescriptionResolver(entityService, this.entityClient)) + .dataFetcher("addOwner", new AddOwnerResolver(entityService)) + .dataFetcher("addOwners", new AddOwnersResolver(entityService)) + .dataFetcher("batchAddOwners", new 
BatchAddOwnersResolver(entityService)) + .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) + .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) + .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) + .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) + .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) + .dataFetcher( + "removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) + .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) + .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) + .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) + .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) + .dataFetcher( + "createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) + .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) + .dataFetcher( + "setDomain", new SetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) + .dataFetcher( + "updateDeprecation", + new UpdateDeprecationResolver(this.entityClient, this.entityService)) + .dataFetcher( + "batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) + .dataFetcher( + "unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) + .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) + .dataFetcher( + "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) + .dataFetcher( + "revokeAccessToken", + new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) + .dataFetcher( + "createIngestionSource", new 
UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) + .dataFetcher( + "createIngestionExecutionRequest", + new CreateIngestionExecutionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "cancelIngestionExecutionRequest", + new CancelIngestionExecutionRequestResolver(this.entityClient)) + .dataFetcher( + "createTestConnectionRequest", + new CreateTestConnectionRequestResolver( + this.entityClient, this.ingestionConfiguration)) + .dataFetcher( + "deleteAssertion", + new DeleteAssertionResolver(this.entityClient, this.entityService)) + .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) + .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) + .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) + .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) + .dataFetcher( + "createGlossaryTerm", + new CreateGlossaryTermResolver(this.entityClient, this.entityService)) + .dataFetcher( + "createGlossaryNode", + new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateParentNode", + new UpdateParentNodeResolver(this.entityService, this.entityClient)) + .dataFetcher( + "deleteGlossaryEntity", + new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) + .dataFetcher( + "updateName", new UpdateNameResolver(this.entityService, this.entityClient)) + .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) + .dataFetcher( + "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) + .dataFetcher( + "createNativeUserResetToken", + new CreateNativeUserResetTokenResolver(this.nativeUserService)) + .dataFetcher( + "batchUpdateSoftDeleted", + new BatchUpdateSoftDeletedResolver(this.entityService)) 
+ .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) + .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) + .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) + .dataFetcher( + "createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) + .dataFetcher( + "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) + .dataFetcher("createPost", new CreatePostResolver(this.postService)) + .dataFetcher("deletePost", new DeletePostResolver(this.postService)) + .dataFetcher( + "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) + .dataFetcher("createView", new CreateViewResolver(this.viewService)) + .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) + .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) + .dataFetcher( + "updateGlobalViewsSettings", + new UpdateGlobalViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateCorpUserViewsSettings", + new UpdateCorpUserViewsSettingsResolver(this.settingsService)) + .dataFetcher( + "updateLineage", + new UpdateLineageResolver(this.entityService, this.lineageService)) + .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) + .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) + .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) + .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) + .dataFetcher( + "createDataProduct", new CreateDataProductResolver(this.dataProductService)) + .dataFetcher( + "updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) + .dataFetcher( + "deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) + .dataFetcher( + "batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) + .dataFetcher( + "createOwnershipType", + new 
CreateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "updateOwnershipType", + new UpdateOwnershipTypeResolver(this.ownershipTypeService)) + .dataFetcher( + "deleteOwnershipType", + new DeleteOwnershipTypeResolver(this.ownershipTypeService))); + } + + private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "SearchResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((SearchResult) env.getSource()).getEntity()))) + .type( + "MatchedField", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((MatchedField) env.getSource()).getEntity()))) + .type( + "SearchAcrossLineageResult", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity()))) + .type( + "AggregationMetadata", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((AggregationMetadata) env.getSource()).getEntity()))) + .type( + "RecommendationContent", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((RecommendationContent) env.getSource()).getEntity()))) + .type( + "BrowseResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, (env) -> ((BrowseResults) env.getSource()).getEntities()))) + .type( + "ParentDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new EntityTypeBatchResolver( + entityTypes, + (env) -> { + final ParentDomainsResult result = env.getSource(); + return result != null ? 
result.getDomains() : null; + }))) + .type( + "EntityRelationshipLegacy", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity()))) + .type( + "EntityRelationship", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((EntityRelationship) env.getSource()).getEntity()))) + .type( + "BrowseResultGroupV2", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity()))) + .type( + "BrowsePathEntry", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))) + .type( + "LineageRelationship", + typeWiring -> + typeWiring + .dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((LineageRelationship) env.getSource()).getEntity())) + .dataFetcher( + "createdActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getCreatedActor() != null + ? relationship.getCreatedActor() + : null; + })) + .dataFetcher( + "updatedActor", + new EntityTypeResolver( + entityTypes, + (env) -> { + final LineageRelationship relationship = env.getSource(); + return relationship.getUpdatedActor() != null + ? 
relationship.getUpdatedActor() + : null; + }))) + .type( + "ListDomainsResult", + typeWiring -> + typeWiring.dataFetcher( + "domains", + new LoadableTypeBatchResolver<>( + domainType, + (env) -> + ((ListDomainsResult) env.getSource()) + .getDomains().stream() + .map(Domain::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryTermsResult", + typeWiring -> + typeWiring.dataFetcher( + "terms", + new LoadableTypeBatchResolver<>( + glossaryTermType, + (env) -> + ((GetRootGlossaryTermsResult) env.getSource()) + .getTerms().stream() + .map(GlossaryTerm::getUrn) + .collect(Collectors.toList())))) + .type( + "GetRootGlossaryNodesResult", + typeWiring -> + typeWiring.dataFetcher( + "nodes", + new LoadableTypeBatchResolver<>( + glossaryNodeType, + (env) -> + ((GetRootGlossaryNodesResult) env.getSource()) + .getNodes().stream() + .map(GlossaryNode::getUrn) + .collect(Collectors.toList())))) + .type( + "AutoCompleteResults", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResults) env.getSource()).getEntities()))) + .type( + "AutoCompleteResultForEntity", + typeWiring -> + typeWiring.dataFetcher( + "entities", + new EntityTypeBatchResolver( + entityTypes, + (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))) + .type( + "PolicyMatchCriterionValue", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, + (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity()))) + .type( + "ListTestsResult", + typeWiring -> + typeWiring.dataFetcher( + "tests", + new LoadableTypeBatchResolver<>( + testType, + (env) -> + ((ListTestsResult) env.getSource()) + .getTests().stream() + .map(Test::getUrn) + .collect(Collectors.toList())))) + .type( + "QuickFilter", + typeWiring -> + typeWiring.dataFetcher( + "entity", + new EntityTypeResolver( + entityTypes, (env) -> ((QuickFilter) env.getSource()).getEntity()))) + .type( + "Owner", + 
typeWiring -> + typeWiring.dataFetcher( + "ownershipType", + new EntityTypeResolver( + entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType()))); + } - private DataFetcher getEntityResolver() { - return new EntityTypeResolver(entityTypes, - (env) -> { - try { - Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME)); - return UrnToEntityMapper.map(urn); - } catch (Exception e) { - throw new RuntimeException("Failed to get entity", e); - } - }); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dataset} type. + */ + private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Dataset", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dataset) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getContainer() != null + ? dataset.getContainer().getUrn() + : null; + })) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Dataset dataset = env.getSource(); + return dataset.getDataPlatformInstance() != null + ? 
dataset.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "datasetProfiles", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "datasetProfile", + DatasetProfileMapper::map)) + .dataFetcher( + "operations", + new TimeSeriesAspectResolver( + this.entityClient, + "dataset", + "operation", + OperationMapper::map, + new SortCriterion() + .setField(OPERATION_EVENT_TIME_FIELD_NAME) + .setOrder(SortOrder.DESCENDING))) + .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) + .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) + .dataFetcher( + "health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher( + "assertions", new EntityAssertionsResolver(entityClient, graphClient)) + .dataFetcher("testResults", new TestResultsResolver(entityClient)) + .dataFetcher( + "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher("runs", new EntityRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) + .type( + "Owner", + typeWiring -> + typeWiring.dataFetcher( + "owner", + new OwnerTypeResolver<>( + ownerTypes, (env) -> ((Owner) env.getSource()).getOwner()))) + .type( + "UserUsageCounts", + typeWiring -> + typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn()))) + .type( + "ForeignKeyConstraint", + typeWiring -> + typeWiring.dataFetcher( + "foreignDataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> + ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn()))) + .type( + "SiblingProperties", + typeWiring -> + typeWiring.dataFetcher( + "siblings", + new EntityTypeBatchResolver( + 
new ArrayList<>(entityTypes), + (env) -> ((SiblingProperties) env.getSource()).getSiblings()))) + .type( + "InstitutionalMemoryMetadata", + typeWiring -> + typeWiring.dataFetcher( + "author", + new LoadableTypeResolver<>( + corpUserType, + (env) -> + ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))) + .type( + "DatasetStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> { + DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } - private DataFetcher getResolver(LoadableType loadableType) { - return getResolver(loadableType, this::getUrnField); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.VersionedDataset} type. + */ + private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "VersionedDataset", + typeWiring -> typeWiring.dataFetcher("relationships", new StaticDataFetcher(null))); + } - private DataFetcher getResolver(LoadableType loadableType, - Function keyProvider) { - return new LoadableTypeResolver<>(loadableType, keyProvider); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. 
+ */ + private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "AccessToken", + typeWiring -> + typeWiring.dataFetcher( + "metadata", + new LoadableTypeResolver<>( + accessTokenMetadataType, + (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn()))); + builder.type( + "ListAccessTokenResult", + typeWiring -> + typeWiring.dataFetcher( + "tokens", + new LoadableTypeBatchResolver<>( + accessTokenMetadataType, + (env) -> + ((ListAccessTokenResult) env.getSource()) + .getTokens().stream() + .map(AccessTokenMetadata::getUrn) + .collect(Collectors.toList())))); + } + + private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTerm", + typeWiring -> + typeWiring + .dataFetcher("schemaMetadata", new AspectResolver()) + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryNode", + typeWiring -> + typeWiring + .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + } + + private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "SchemaFieldEntity", + typeWiring -> + typeWiring.dataFetcher( + "parent", + new EntityTypeResolver( + entityTypes, (env) -> ((SchemaFieldEntity) env.getSource()).getParent()))); + } + + private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "EntityPath", + typeWiring -> + typeWiring.dataFetcher( + "path", + new BatchGetEntitiesResolver( + entityTypes, (env) -> ((EntityPath) env.getSource()).getPath()))); + } - private String 
getUrnField(DataFetchingEnvironment env) { - return env.getArgument(URN_FIELD_NAME); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpUser} type. + */ + private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpUser", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "CorpUserInfo", + typeWiring -> + typeWiring.dataFetcher( + "manager", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn()))); + } - private void configureMutationResolvers(final RuntimeWiring.Builder builder) { - builder.type("Mutation", typeWiring -> typeWiring - .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType)) - .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType)) - .dataFetcher("createTag", new CreateTagResolver(this.entityClient, this.entityService)) - .dataFetcher("updateTag", new MutableTypeResolver<>(tagType)) - .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService)) - .dataFetcher("deleteTag", new DeleteTagResolver(entityClient)) - .dataFetcher("updateChart", new MutableTypeResolver<>(chartType)) - .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType)) - .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType)) - .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType)) - .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType)) - .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType)) - .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType)) - .dataFetcher("addTag", new AddTagResolver(entityService)) - .dataFetcher("addTags", new AddTagsResolver(entityService)) - .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService)) - .dataFetcher("removeTag", 
new RemoveTagResolver(entityService)) - .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService)) - .dataFetcher("addTerm", new AddTermResolver(entityService)) - .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService)) - .dataFetcher("addTerms", new AddTermsResolver(entityService)) - .dataFetcher("removeTerm", new RemoveTermResolver(entityService)) - .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService)) - .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient)) - .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient)) - .dataFetcher("updateDescription", new UpdateDescriptionResolver(entityService, this.entityClient)) - .dataFetcher("addOwner", new AddOwnerResolver(entityService)) - .dataFetcher("addOwners", new AddOwnersResolver(entityService)) - .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService)) - .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService)) - .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService)) - .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient)) - .dataFetcher("removeLink", new RemoveLinkResolver(entityService)) - .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService)) - .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService)) - .dataFetcher("createGroup", new CreateGroupResolver(this.groupService)) - .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) - .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) - .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) - .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("moveDomain", new MoveDomainResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteDomain", new 
DeleteDomainResolver(entityClient)) - .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) - .dataFetcher("updateDeprecation", new UpdateDeprecationResolver(this.entityClient, this.entityService)) - .dataFetcher("batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService)) - .dataFetcher("unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService)) - .dataFetcher("createSecret", new CreateSecretResolver(this.entityClient, this.secretService)) - .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient)) - .dataFetcher("createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService)) - .dataFetcher("revokeAccessToken", new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService)) - .dataFetcher("createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient)) - .dataFetcher("deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient)) - .dataFetcher("createIngestionExecutionRequest", new CreateIngestionExecutionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("cancelIngestionExecutionRequest", new CancelIngestionExecutionRequestResolver(this.entityClient)) - .dataFetcher("createTestConnectionRequest", new CreateTestConnectionRequestResolver(this.entityClient, this.ingestionConfiguration)) - .dataFetcher("deleteAssertion", new DeleteAssertionResolver(this.entityClient, this.entityService)) - .dataFetcher("createTest", new CreateTestResolver(this.entityClient)) - .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) - .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) - .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) - .dataFetcher("createGlossaryTerm", new 
CreateGlossaryTermResolver(this.entityClient, this.entityService)) - .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) - .dataFetcher("updateParentNode", new UpdateParentNodeResolver(this.entityService, this.entityClient)) - .dataFetcher("deleteGlossaryEntity", - new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) - .dataFetcher("updateName", new UpdateNameResolver(this.entityService, this.entityClient)) - .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService)) - .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService)) - .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService)) - .dataFetcher("batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService)) - .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService)) - .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient)) - .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) - .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) - .dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) - .dataFetcher("createPost", new CreatePostResolver(this.postService)) - .dataFetcher("deletePost", new DeletePostResolver(this.postService)) - .dataFetcher("batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient)) - .dataFetcher("createView", new CreateViewResolver(this.viewService)) - .dataFetcher("updateView", new UpdateViewResolver(this.viewService)) - .dataFetcher("deleteView", new DeleteViewResolver(this.viewService)) - .dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService)) - .dataFetcher("updateLineage", 
new UpdateLineageResolver(this.entityService, this.lineageService)) - .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService)) - .dataFetcher("createQuery", new CreateQueryResolver(this.queryService)) - .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService)) - .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService)) - .dataFetcher("createDataProduct", new CreateDataProductResolver(this.dataProductService)) - .dataFetcher("updateDataProduct", new UpdateDataProductResolver(this.dataProductService)) - .dataFetcher("deleteDataProduct", new DeleteDataProductResolver(this.dataProductService)) - .dataFetcher("batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService)) - .dataFetcher("createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService)) - .dataFetcher("deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService)) - ); - } + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.CorpGroup} type. 
+ */ + private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "CorpGroup", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder + .type( + "CorpGroupInfo", + typeWiring -> + typeWiring + .dataFetcher( + "admins", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getAdmins().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()))) + .dataFetcher( + "members", + new LoadableTypeBatchResolver<>( + corpUserType, + (env) -> + ((CorpGroupInfo) env.getSource()) + .getMembers().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList())))) + .type( + "ListGroupsResult", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + corpGroupType, + (env) -> + ((ListGroupsResult) env.getSource()) + .getGroups().stream() + .map(CorpGroup::getUrn) + .collect(Collectors.toList())))); + } + + private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "Tag", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "TagAssociation", + typeWiring -> + typeWiring.dataFetcher( + "tag", + new LoadableTypeResolver<>( + tagType, + (env) -> + ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()) + .getTag() + .getUrn()))); + } + + private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { + builder.type( + "GlossaryTermAssociation", + typeWiring -> + typeWiring.dataFetcher( + "term", + new LoadableTypeResolver<>( + glossaryTermType, + (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn()))); + } - private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("SearchResult", typeWiring -> 
typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchResult) env.getSource()).getEntity())) - ) - .type("MatchedField", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((MatchedField) env.getSource()).getEntity())) - ) - .type("SearchAcrossLineageResult", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity())) - ) - .type("AggregationMetadata", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((AggregationMetadata) env.getSource()).getEntity())) - ) - .type("RecommendationContent", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((RecommendationContent) env.getSource()).getEntity())) - ) - .type("BrowseResults", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((BrowseResults) env.getSource()).getEntities())) - ) - .type("ParentDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new EntityTypeBatchResolver(entityTypes, - (env) -> { - final ParentDomainsResult result = env.getSource(); - return result != null ? 
result.getDomains() : null; - })) - ) - .type("EntityRelationshipLegacy", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity())) - ) - .type("EntityRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((EntityRelationship) env.getSource()).getEntity())) - ) - .type("BrowseResultGroupV2", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity())) - ) - .type("BrowsePathEntry", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((BrowsePathEntry) env.getSource()).getEntity())) - ) - .type("LineageRelationship", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((LineageRelationship) env.getSource()).getEntity())) - .dataFetcher("createdActor", - new EntityTypeResolver(entityTypes, - (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getCreatedActor() != null ? relationship.getCreatedActor() : null; - }) - ) - .dataFetcher("updatedActor", - new EntityTypeResolver(entityTypes, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Notebook} type. 
+ */ + private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Notebook", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final LineageRelationship relationship = env.getSource(); - return relationship.getUpdatedActor() != null ? relationship.getUpdatedActor() : null; - }) - ) - ) - .type("ListDomainsResult", typeWiring -> typeWiring - .dataFetcher("domains", new LoadableTypeBatchResolver<>(domainType, - (env) -> ((ListDomainsResult) env.getSource()).getDomains().stream() - .map(Domain::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryTermsResult", typeWiring -> typeWiring - .dataFetcher("terms", new LoadableTypeBatchResolver<>(glossaryTermType, - (env) -> ((GetRootGlossaryTermsResult) env.getSource()).getTerms().stream() - .map(GlossaryTerm::getUrn) - .collect(Collectors.toList()))) - ) - .type("GetRootGlossaryNodesResult", typeWiring -> typeWiring - .dataFetcher("nodes", new LoadableTypeBatchResolver<>(glossaryNodeType, - (env) -> ((GetRootGlossaryNodesResult) env.getSource()).getNodes().stream() - .map(GlossaryNode::getUrn) - .collect(Collectors.toList()))) - ) - .type("AutoCompleteResults", typeWiring -> typeWiring - .dataFetcher("entities", - new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResults) env.getSource()).getEntities())) - ) - .type("AutoCompleteResultForEntity", typeWiring -> typeWiring - .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes, - (env) -> ((AutoCompleteResultForEntity) 
env.getSource()).getEntities())) - ) - .type("PolicyMatchCriterionValue", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity())) - ) - .type("ListTestsResult", typeWiring -> typeWiring - .dataFetcher("tests", new LoadableTypeBatchResolver<>(testType, - (env) -> ((ListTestsResult) env.getSource()).getTests().stream() - .map(Test::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuickFilter", typeWiring -> typeWiring - .dataFetcher("entity", new EntityTypeResolver(entityTypes, - (env) -> ((QuickFilter) env.getSource()).getEntity())) - ) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("ownershipType", new EntityTypeResolver(entityTypes, - (env) -> ((Owner) env.getSource()).getOwnershipType())) - ); - } + final Notebook notebook = env.getSource(); + return notebook.getDataPlatformInstance() != null + ? notebook.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dataset} type. - */ - private void configureDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Dataset", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Dashboard} type. 
+ */ + private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Dashboard", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dataset) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("container", - new LoadableTypeResolver<>(containerType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getContainer() != null ? dataset.getContainer().getUrn() : null; - }) - ) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Dashboard dashboard = env.getSource(); + return dashboard.getDataPlatformInstance() != null + ? dashboard.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final Dataset dataset = env.getSource(); - return dataset.getDataPlatformInstance() != null ? 
dataset.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("datasetProfiles", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "datasetProfile", - DatasetProfileMapper::map - ) - ) - .dataFetcher("operations", new TimeSeriesAspectResolver( - this.entityClient, - "dataset", - "operation", - OperationMapper::map, - new SortCriterion().setField(OPERATION_EVENT_TIME_FIELD_NAME).setOrder(SortOrder.DESCENDING) - ) - ) - .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient)) - .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient)) - .dataFetcher("health", new DatasetHealthResolver(graphClient, timeseriesAspectService)) - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("assertions", new EntityAssertionsResolver(entityClient, graphClient)) - .dataFetcher("testResults", new TestResultsResolver(entityClient)) - .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("runs", new EntityRunsResolver(entityClient)) + final Dashboard dashboard = env.getSource(); + return dashboard.getContainer() != null + ? 
dashboard.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) + .dataFetcher( + "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))) - .type("Owner", typeWiring -> typeWiring - .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes, - (env) -> ((Owner) env.getSource()).getOwner())) - ) - .type("UserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>(corpUserType, - (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn())) - ) - .type("ForeignKeyConstraint", typeWiring -> typeWiring - .dataFetcher("foreignDataset", new LoadableTypeResolver<>(datasetType, - (env) -> ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn())) - ) - .type("SiblingProperties", typeWiring -> typeWiring - .dataFetcher("siblings", - new EntityTypeBatchResolver( - new ArrayList<>(entityTypes), - (env) -> ((SiblingProperties) env.getSource()).getSiblings())) - ) - .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring - .dataFetcher("author", new LoadableTypeResolver<>(corpUserType, - (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())) - ) - .type("DatasetStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType, + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "DashboardInfo", + typeWiring -> + typeWiring.dataFetcher( + "charts", + new LoadableTypeBatchResolver<>( + chartType, + (env) -> + ((DashboardInfo) env.getSource()) + .getCharts().stream() + .map(Chart::getUrn) + .collect(Collectors.toList())))); + builder.type( + "DashboardUserUsageCounts", + typeWiring -> + 
typeWiring.dataFetcher( + "user", + new LoadableTypeResolver<>( + corpUserType, + (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn()))); + builder.type( + "DashboardStatsSummary", + typeWiring -> + typeWiring.dataFetcher( + "topUsersLast30Days", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.VersionedDataset} type. - */ - private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) { - builder - .type("VersionedDataset", typeWiring -> typeWiring - .dataFetcher("relationships", new StaticDataFetcher(null))); - - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type. 
- */ - private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) { - builder.type("AccessToken", typeWiring -> typeWiring - .dataFetcher("metadata", new LoadableTypeResolver<>(accessTokenMetadataType, - (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn())) - ); - builder.type("ListAccessTokenResult", typeWiring -> typeWiring - .dataFetcher("tokens", new LoadableTypeBatchResolver<>(accessTokenMetadataType, - (env) -> ((ListAccessTokenResult) env.getSource()).getTokens().stream() - .map(AccessTokenMetadata::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring - .dataFetcher("schemaMetadata", new AspectResolver()) - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryNode", typeWiring -> typeWiring - .dataFetcher("parentNodes", new ParentNodesResolver(entityClient)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - } - - private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) { - builder.type("SchemaFieldEntity", typeWiring -> typeWiring - .dataFetcher("parent", new EntityTypeResolver(entityTypes, - (env) -> ((SchemaFieldEntity) env.getSource()).getParent())) - ); - } - - private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) { - builder.type("EntityPath", typeWiring -> typeWiring - .dataFetcher("path", new BatchGetEntitiesResolver(entityTypes, - (env) -> ((EntityPath) env.getSource()).getPath())) - ); - } - - /** - * Configures resolvers responsible for resolving the 
{@link com.linkedin.datahub.graphql.generated.CorpUser} type. - */ - private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpUser", typeWiring -> typeWiring - .dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("CorpUserInfo", typeWiring -> typeWiring - .dataFetcher("manager", new LoadableTypeResolver<>(corpUserType, - (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn())) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpGroup} type. - */ - private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) { - builder.type("CorpGroup", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService))); - builder.type("CorpGroupInfo", typeWiring -> typeWiring - .dataFetcher("admins", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getAdmins().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - .dataFetcher("members", - new LoadableTypeBatchResolver<>(corpUserType, - (env) -> ((CorpGroupInfo) env.getSource()).getMembers().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()))) - ) - .type("ListGroupsResult", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(corpGroupType, - (env) -> ((ListGroupsResult) env.getSource()).getGroups().stream() - .map(CorpGroup::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("Tag", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - builder.type("TagAssociation", typeWiring -> typeWiring - .dataFetcher("tag", - new LoadableTypeResolver<>(tagType, - (env) -> 
((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()).getTag().getUrn())) - ); - } - - private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTermAssociation", typeWiring -> typeWiring - .dataFetcher("term", - new LoadableTypeResolver<>(glossaryTermType, - (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn())) - ); - } + DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); + return summary.getTopUsersLast30Days() != null + ? summary.getTopUsersLast30Days().stream() + .map(CorpUser::getUrn) + .collect(Collectors.toList()) + : null; + }))); + } /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Notebook} type. + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.Chart} type. */ - private void configureNotebookResolvers(final RuntimeWiring.Builder builder) { - builder.type("Notebook", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Notebook) env.getSource()).getPlatform().getUrn())) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Notebook notebook = env.getSource(); - return notebook.getDataPlatformInstance() != null ? notebook.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dashboard} type. 
- */ - private void configureDashboardResolvers(final RuntimeWiring.Builder builder) { - builder.type("Dashboard", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getDataPlatformInstance() != null ? dashboard.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>(containerType, - (env) -> { - final Dashboard dashboard = env.getSource(); - return dashboard.getContainer() != null ? dashboard.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService)) - .dataFetcher("statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("DashboardInfo", typeWiring -> typeWiring - .dataFetcher("charts", new LoadableTypeBatchResolver<>(chartType, - (env) -> ((DashboardInfo) env.getSource()).getCharts().stream() - .map(Chart::getUrn) - .collect(Collectors.toList()))) - ); - builder.type("DashboardUserUsageCounts", typeWiring -> typeWiring - .dataFetcher("user", new LoadableTypeResolver<>( - corpUserType, - (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn())) - ); - builder.type("DashboardStatsSummary", typeWiring -> typeWiring - .dataFetcher("topUsersLast30Days", new 
LoadableTypeBatchResolver<>(corpUserType, - (env) -> { - DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource()); - return summary.getTopUsersLast30Days() != null - ? summary.getTopUsersLast30Days().stream() - .map(CorpUser::getUrn) - .collect(Collectors.toList()) - : null; - })) - ); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Chart} type. - */ - private void configureChartResolvers(final RuntimeWiring.Builder builder) { - builder.type("Chart", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getDataPlatformInstance() != null ? chart.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("container", new LoadableTypeResolver<>( - containerType, - (env) -> { - final Chart chart = env.getSource(); - return chart.getContainer() != null ? 
chart.getContainer().getUrn() : null; - }) - ) - .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) - .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) - .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ); - builder.type("ChartInfo", typeWiring -> typeWiring - .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((ChartInfo) env.getSource()).getInputs().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - ); - } - - /** - * Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. - */ - private void configureTypeResolvers(final RuntimeWiring.Builder builder) { - builder - .type("Entity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("EntityWithRelationships", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("BrowsableEntity", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(browsableTypes.stream() - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("OwnerType", typeWiring -> typeWiring - .typeResolver(new EntityInterfaceTypeResolver(ownerTypes.stream() - .filter(graphType -> graphType instanceof EntityType) - .map(graphType -> (EntityType) graphType) - .collect(Collectors.toList()) - ))) - .type("PlatformSchema", typeWiring -> typeWiring - .typeResolver(new PlatformSchemaUnionTypeResolver()) - ) - .type("HyperParameterValueType", typeWiring -> typeWiring - .typeResolver(new 
HyperParameterValueTypeResolver()) - ) - .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) - .type("TimeSeriesAspect", typeWiring -> typeWiring - .typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) - .type("ResultsType", typeWiring -> typeWiring - .typeResolver(new ResultsTypeResolver())); - } - - /** - * Configures custom type extensions leveraged within our GraphQL schema. - */ - private void configureTypeExtensions(final RuntimeWiring.Builder builder) { - builder.scalar(GraphQLLong); - } - - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataJob} type. - */ - private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataJob", typeWiring -> typeWiring + private void configureChartResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Chart", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("dataFlow", new LoadableTypeResolver<>(dataFlowType, - (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Chart) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final Chart chart = env.getSource(); + return chart.getDataPlatformInstance() != null + ? 
chart.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher( + "container", + new LoadableTypeResolver<>( + containerType, (env) -> { - final DataJob dataJob = env.getSource(); - return dataJob.getDataPlatformInstance() != null ? dataJob.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + final Chart chart = env.getSource(); + return chart.getContainer() != null + ? chart.getContainer().getUrn() + : null; + })) + .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)) + .dataFetcher( + "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService)) .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - ) - .type("DataJobInputOutput", typeWiring -> typeWiring - .dataFetcher("inputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("outputDatasets", new LoadableTypeBatchResolver<>(datasetType, - (env) -> ((DataJobInputOutput) env.getSource()).getOutputDatasets().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()))) - .dataFetcher("inputDatajobs", new LoadableTypeBatchResolver<>(dataJobType, - (env) -> ((DataJobInputOutput) env.getSource()).getInputDatajobs().stream() - .map(DataJob::getUrn) - .collect(Collectors.toList()))) - ); - } + .dataFetcher("exists", new EntityExistsResolver(entityService))); + builder.type( + "ChartInfo", + typeWiring -> + typeWiring.dataFetcher( + "inputs", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((ChartInfo) env.getSource()) + .getInputs().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList())))); + } + + /** Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. 
*/ + private void configureTypeResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "Entity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "EntityWithRelationships", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + loadableTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "BrowsableEntity", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + browsableTypes.stream() + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "OwnerType", + typeWiring -> + typeWiring.typeResolver( + new EntityInterfaceTypeResolver( + ownerTypes.stream() + .filter(graphType -> graphType instanceof EntityType) + .map(graphType -> (EntityType) graphType) + .collect(Collectors.toList())))) + .type( + "PlatformSchema", + typeWiring -> typeWiring.typeResolver(new PlatformSchemaUnionTypeResolver())) + .type( + "HyperParameterValueType", + typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver())) + .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver())) + .type( + "TimeSeriesAspect", + typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver())) + .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver())); + } + + /** Configures custom type extensions leveraged within our GraphQL schema. */ + private void configureTypeExtensions(final RuntimeWiring.Builder builder) { + builder.scalar(GraphQLLong); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataFlow} type. 
- */ - private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataFlow", typeWiring -> typeWiring + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataJob} type. + */ + private void configureDataJobResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataJob", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "dataFlow", + new LoadableTypeResolver<>( + dataFlowType, + (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final DataJob dataJob = env.getSource(); + return dataJob.getDataPlatformInstance() != null + ? 
dataJob.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runs", new DataJobRunsResolver(entityClient)) + .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient)) + .dataFetcher("exists", new EntityExistsResolver(entityService))) + .type( + "DataJobInputOutput", + typeWiring -> + typeWiring + .dataFetcher( + "inputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "outputDatasets", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getOutputDatasets().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()))) + .dataFetcher( + "inputDatajobs", + new LoadableTypeBatchResolver<>( + dataJobType, + (env) -> + ((DataJobInputOutput) env.getSource()) + .getInputDatajobs().stream() + .map(DataJob::getUrn) + .collect(Collectors.toList())))); + } + + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.DataFlow} type. 
+ */ + private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataFlow", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn())) .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final DataFlow dataFlow = env.getSource(); - return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } + final DataFlow dataFlow = env.getSource(); + return dataFlow.getDataPlatformInstance() != null + ? dataFlow.getDataPlatformInstance().getUrn() + : null; + }))); + } - /** - * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.MLFeatureTable} type. 
- */ - private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { - builder - .type("MLFeatureTable", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", - new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + /** + * Configures resolvers responsible for resolving the {@link + * com.linkedin.datahub.graphql.generated.MLFeatureTable} type. + */ + private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "MLFeatureTable", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeatureTable entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeatureTableProperties", + typeWiring -> + typeWiring + .dataFetcher( + "mlFeatures", + new LoadableTypeBatchResolver<>( + mlFeatureType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null + ? 
((MLFeatureTableProperties) env.getSource()) + .getMlFeatures().stream() + .map(MLFeature::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of())) + .dataFetcher( + "mlPrimaryKeys", + new LoadableTypeBatchResolver<>( + mlPrimaryKeyType, + (env) -> + ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() + != null + ? ((MLFeatureTableProperties) env.getSource()) + .getMlPrimaryKeys().stream() + .map(MLPrimaryKey::getUrn) + .collect(Collectors.toList()) + : ImmutableList.of()))) + .type( + "MLFeatureProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, (env) -> { - final MLFeatureTable entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeatureTableProperties", typeWiring -> typeWiring - .dataFetcher("mlFeatures", - new LoadableTypeBatchResolver<>(mlFeatureType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null - ? ((MLFeatureTableProperties) env.getSource()).getMlFeatures().stream() - .map(MLFeature::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - .dataFetcher("mlPrimaryKeys", - new LoadableTypeBatchResolver<>(mlPrimaryKeyType, - (env) -> - ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() != null - ? 
((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys().stream() - .map(MLPrimaryKey::getUrn) - .collect(Collectors.toList()) : ImmutableList.of())) - ) - .type("MLFeatureProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLFeatureProperties) env.getSource()).getSources() == null) { + if (((MLFeatureProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLFeatureProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLPrimaryKeyProperties", typeWiring -> typeWiring - .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType, - (env) -> { - if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { + } + return ((MLFeatureProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLPrimaryKeyProperties", + typeWiring -> + typeWiring.dataFetcher( + "sources", + new LoadableTypeBatchResolver<>( + datasetType, + (env) -> { + if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) { return Collections.emptyList(); - } - return ((MLPrimaryKeyProperties) env.getSource()).getSources().stream() - .map(datasetType.getKeyProvider()) - .collect(Collectors.toList()); - }) - ) - ) - .type("MLModel", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new 
LoadableTypeResolver<>(dataPlatformInstanceType, + } + return ((MLPrimaryKeyProperties) env.getSource()) + .getSources().stream() + .map(datasetType.getKeyProvider()) + .collect(Collectors.toList()); + }))) + .type( + "MLModel", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModel) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModel mlModel = env.getSource(); + return mlModel.getDataPlatformInstance() != null + ? mlModel.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLModelProperties", + typeWiring -> + typeWiring.dataFetcher( + "groups", + new LoadableTypeBatchResolver<>( + mlModelGroupType, (env) -> { - final MLModel mlModel = env.getSource(); - return mlModel.getDataPlatformInstance() != null ? 
mlModel.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLModelProperties", typeWiring -> typeWiring - .dataFetcher("groups", new LoadableTypeBatchResolver<>(mlModelGroupType, - (env) -> { - MLModelProperties properties = env.getSource(); - if (properties.getGroups() != null) { + MLModelProperties properties = env.getSource(); + if (properties.getGroups() != null) { return properties.getGroups().stream() .map(MLModelGroup::getUrn) .collect(Collectors.toList()); - } - return Collections.emptyList(); - }) - ) - ) - .type("MLModelGroup", typeWiring -> typeWiring + } + return Collections.emptyList(); + }))) + .type( + "MLModelGroup", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher( + "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLModelGroup entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLFeature", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLFeature entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? 
entity.getDataPlatformInstance().getUrn() + : null; + }))) + .type( + "MLPrimaryKey", + typeWiring -> + typeWiring + .dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient)) + .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) + .dataFetcher("exists", new EntityExistsResolver(entityService)) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, + (env) -> { + final MLPrimaryKey entity = env.getSource(); + return entity.getDataPlatformInstance() != null + ? entity.getDataPlatformInstance().getUrn() + : null; + }))); + } + + private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "GlossaryTerm", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "GlossaryNode", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureDomainResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Domain", + typeWiring -> + typeWiring + .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) + .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + builder.type( + "DomainAssociation", + typeWiring -> + typeWiring.dataFetcher( + "domain", + new LoadableTypeResolver<>( + domainType, + (env) -> + ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()) + .getDomain() + .getUrn()))); + } + + private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProduct", + typeWiring -> + typeWiring + .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void 
configureAssertionResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "Assertion", + typeWiring -> + typeWiring .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn()) - ) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + .dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) + .dataFetcher( + "dataPlatformInstance", + new LoadableTypeResolver<>( + dataPlatformInstanceType, (env) -> { - final MLModelGroup entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLFeature", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final Assertion assertion = env.getSource(); + return assertion.getDataPlatformInstance() != null + ? assertion.getDataPlatformInstance().getUrn() + : null; + })) + .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); + } + + private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { + // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
+ builder.type( + "ActorFilter", + typeWiring -> + typeWiring + .dataFetcher( + "resolvedUsers", + new LoadableTypeBatchResolver<>( + corpUserType, (env) -> { - final MLFeature entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ) - .type("MLPrimaryKey", typeWiring -> typeWiring - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("exists", new EntityExistsResolver(entityService)) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, + final ActorFilter filter = env.getSource(); + return filter.getUsers(); + })) + .dataFetcher( + "resolvedGroups", + new LoadableTypeBatchResolver<>( + corpGroupType, (env) -> { - final MLPrimaryKey entity = env.getSource(); - return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null; - }) - ) - ); - } - - private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) { - builder.type("GlossaryTerm", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))) - .type("GlossaryNode", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureDomainResolvers(final RuntimeWiring.Builder builder) { - builder.type("Domain", typeWiring -> typeWiring - .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient)) - .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - builder.type("DomainAssociation", typeWiring -> typeWiring - .dataFetcher("domain", - new LoadableTypeResolver<>(domainType, - (env) -> ((com.linkedin.datahub.graphql.generated.DomainAssociation) 
env.getSource()).getDomain().getUrn())) - ); - } - - private void configureDataProductResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProduct", typeWiring -> typeWiring - .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient)) - .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) - ); - } - - private void configureAssertionResolvers(final RuntimeWiring.Builder builder) { - builder.type("Assertion", typeWiring -> typeWiring.dataFetcher("relationships", - new EntityRelationshipsResultResolver(graphClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> ((Assertion) env.getSource()).getPlatform().getUrn())) - .dataFetcher("dataPlatformInstance", - new LoadableTypeResolver<>(dataPlatformInstanceType, - (env) -> { - final Assertion assertion = env.getSource(); - return assertion.getDataPlatformInstance() != null ? assertion.getDataPlatformInstance().getUrn() : null; - }) - ) - .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient))); - } - - private void configurePolicyResolvers(final RuntimeWiring.Builder builder) { - // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type. 
- builder.type("ActorFilter", typeWiring -> typeWiring.dataFetcher("resolvedUsers", - new LoadableTypeBatchResolver<>(corpUserType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getUsers(); - })).dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getGroups(); - })).dataFetcher("resolvedRoles", new LoadableTypeBatchResolver<>(dataHubRoleType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getRoles(); - })).dataFetcher("resolvedOwnershipTypes", new LoadableTypeBatchResolver<>(ownershipType, (env) -> { - final ActorFilter filter = env.getSource(); - return filter.getResourceOwnersTypes(); - }))); - } - - private void configureRoleResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataHubRole", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))); - } - - private void configureViewResolvers(final RuntimeWiring.Builder builder) { - builder - .type("DataHubView", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListViewsResult", typeWiring -> typeWiring - .dataFetcher("views", new LoadableTypeBatchResolver<>( - dataHubViewType, - (env) -> ((ListViewsResult) env.getSource()).getViews().stream() - .map(DataHubView::getUrn) - .collect(Collectors.toList()))) - ) - .type("CorpUserViewsSettings", typeWiring -> typeWiring - .dataFetcher("defaultView", new LoadableTypeResolver<>( + final ActorFilter filter = env.getSource(); + return filter.getGroups(); + })) + .dataFetcher( + "resolvedRoles", + new LoadableTypeBatchResolver<>( + dataHubRoleType, + (env) -> { + final ActorFilter filter = env.getSource(); + return filter.getRoles(); + })) + .dataFetcher( + "resolvedOwnershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> { + final ActorFilter filter = env.getSource(); + 
return filter.getResourceOwnersTypes(); + }))); + } + + private void configureRoleResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataHubRole", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))); + } + + private void configureViewResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "DataHubView", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListViewsResult", + typeWiring -> + typeWiring.dataFetcher( + "views", + new LoadableTypeBatchResolver<>( + dataHubViewType, + (env) -> + ((ListViewsResult) env.getSource()) + .getViews().stream() + .map(DataHubView::getUrn) + .collect(Collectors.toList())))) + .type( + "CorpUserViewsSettings", + typeWiring -> + typeWiring.dataFetcher( + "defaultView", + new LoadableTypeResolver<>( dataHubViewType, (env) -> { - final CorpUserViewsSettings settings = env.getSource(); - if (settings.getDefaultView() != null) { - return settings.getDefaultView().getUrn(); - } - return null; - } - ) - )); - } - - private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { - builder - .type("QueryEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))) - .type("ListQueriesResult", typeWiring -> typeWiring - .dataFetcher("queries", new LoadableTypeBatchResolver<>( - queryType, - (env) -> ((ListQueriesResult) env.getSource()).getQueries().stream() - .map(QueryEntity::getUrn) - .collect(Collectors.toList()))) - ) - .type("QuerySubject", typeWiring -> typeWiring - .dataFetcher("dataset", new LoadableTypeResolver<>( - datasetType, - (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn())) - ); - - } - - private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { - builder - .type("OwnershipTypeEntity", - typeWiring -> typeWiring.dataFetcher("relationships", new 
EntityRelationshipsResultResolver(graphClient))) - .type("ListOwnershipTypesResult", typeWiring -> typeWiring - .dataFetcher("ownershipTypes", new LoadableTypeBatchResolver<>(ownershipType, - (env) -> ((ListOwnershipTypesResult) env.getSource()).getOwnershipTypes().stream() - .map(OwnershipTypeEntity::getUrn) - .collect(Collectors.toList()))) - ); - } - - private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { - builder.type("DataProcessInstance", - typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) + final CorpUserViewsSettings settings = env.getSource(); + if (settings.getDefaultView() != null) { + return settings.getDefaultView().getUrn(); + } + return null; + }))); + } + + private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) { + builder + .type( + "QueryEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListQueriesResult", + typeWiring -> + typeWiring.dataFetcher( + "queries", + new LoadableTypeBatchResolver<>( + queryType, + (env) -> + ((ListQueriesResult) env.getSource()) + .getQueries().stream() + .map(QueryEntity::getUrn) + .collect(Collectors.toList())))) + .type( + "QuerySubject", + typeWiring -> + typeWiring.dataFetcher( + "dataset", + new LoadableTypeResolver<>( + datasetType, + (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn()))); + } + + private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) { + builder + .type( + "OwnershipTypeEntity", + typeWiring -> + typeWiring.dataFetcher( + "relationships", new EntityRelationshipsResultResolver(graphClient))) + .type( + "ListOwnershipTypesResult", + typeWiring -> + typeWiring.dataFetcher( + "ownershipTypes", + new LoadableTypeBatchResolver<>( + ownershipType, + (env) -> + ((ListOwnershipTypesResult) env.getSource()) + .getOwnershipTypes().stream() + 
.map(OwnershipTypeEntity::getUrn) + .collect(Collectors.toList())))); + } + + private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "DataProcessInstance", + typeWiring -> + typeWiring + .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)) .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService)) - .dataFetcher("state", new TimeSeriesAspectResolver(this.entityClient, "dataProcessInstance", - DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, DataProcessInstanceRunEventMapper::map))); - } - - private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { - builder.type("TestResult", typeWiring -> typeWiring - .dataFetcher("test", new LoadableTypeResolver<>(testType, - (env) -> { - final TestResult testResult = env.getSource(); - return testResult.getTest() != null ? testResult.getTest().getUrn() : null; - })) - ); - } - - private DataLoader> createDataLoader(final LoadableType graphType, final QueryContext queryContext) { - BatchLoaderContextProvider contextProvider = () -> queryContext; - DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); - return DataLoader.newDataLoader((keys, context) -> CompletableFuture.supplyAsync(() -> { - try { - log.debug(String.format("Batch loading entities of type: %s, keys: %s", graphType.name(), keys)); - return graphType.batchLoad(keys, context.getContext()); - } catch (Exception e) { - log.error(String.format("Failed to load Entities of type: %s, keys: %s", graphType.name(), keys) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to retrieve entities of type %s", graphType.name()), e); - } - }), loaderOptions); - } - - private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { - builder.type("IngestionSource", typeWiring -> typeWiring - .dataFetcher("executions", new 
IngestionSourceExecutionRequestsResolver(entityClient)) - .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType, - (env) -> { - final IngestionSource ingestionSource = env.getSource(); - return ingestionSource.getPlatform() != null ? ingestionSource.getPlatform().getUrn() : null; - }) - )); - } + .dataFetcher( + "state", + new TimeSeriesAspectResolver( + this.entityClient, + "dataProcessInstance", + DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, + DataProcessInstanceRunEventMapper::map))); + } + + private void configureTestResultResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "TestResult", + typeWiring -> + typeWiring.dataFetcher( + "test", + new LoadableTypeResolver<>( + testType, + (env) -> { + final TestResult testResult = env.getSource(); + return testResult.getTest() != null ? testResult.getTest().getUrn() : null; + }))); + } + + private DataLoader> createDataLoader( + final LoadableType graphType, final QueryContext queryContext) { + BatchLoaderContextProvider contextProvider = () -> queryContext; + DataLoaderOptions loaderOptions = + DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider); + return DataLoader.newDataLoader( + (keys, context) -> + CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + String.format( + "Batch loading entities of type: %s, keys: %s", + graphType.name(), keys)); + return graphType.batchLoad(keys, context.getContext()); + } catch (Exception e) { + log.error( + String.format( + "Failed to load Entities of type: %s, keys: %s", + graphType.name(), keys) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to retrieve entities of type %s", graphType.name()), + e); + } + }), + loaderOptions); + } + + private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) { + builder.type( + "IngestionSource", + typeWiring -> + typeWiring + .dataFetcher( + "executions", new IngestionSourceExecutionRequestsResolver(entityClient)) + 
.dataFetcher( + "platform", + new LoadableTypeResolver<>( + dataPlatformType, + (env) -> { + final IngestionSource ingestionSource = env.getSource(); + return ingestionSource.getPlatform() != null + ? ingestionSource.getPlatform().getUrn() + : null; + }))); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java index 157fb10ce70785..4829194a8ce4d9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java @@ -38,41 +38,41 @@ @Data public class GmsGraphQLEngineArgs { - EntityClient entityClient; - SystemEntityClient systemEntityClient; - GraphClient graphClient; - UsageClient usageClient; - AnalyticsService analyticsService; - EntityService entityService; - RecommendationsService recommendationsService; - StatefulTokenService statefulTokenService; - TimeseriesAspectService timeseriesAspectService; - EntityRegistry entityRegistry; - SecretService secretService; - NativeUserService nativeUserService; - IngestionConfiguration ingestionConfiguration; - AuthenticationConfiguration authenticationConfiguration; - AuthorizationConfiguration authorizationConfiguration; - GitVersion gitVersion; - TimelineService timelineService; - boolean supportsImpactAnalysis; - VisualConfiguration visualConfiguration; - TelemetryConfiguration telemetryConfiguration; - TestsConfiguration testsConfiguration; - DataHubConfiguration datahubConfiguration; - ViewsConfiguration viewsConfiguration; - SiblingGraphService siblingGraphService; - GroupService groupService; - RoleService roleService; - InviteTokenService inviteTokenService; - PostService postService; - ViewService viewService; - OwnershipTypeService ownershipTypeService; - SettingsService settingsService; - LineageService lineageService; - QueryService 
queryService; - FeatureFlags featureFlags; - DataProductService dataProductService; + EntityClient entityClient; + SystemEntityClient systemEntityClient; + GraphClient graphClient; + UsageClient usageClient; + AnalyticsService analyticsService; + EntityService entityService; + RecommendationsService recommendationsService; + StatefulTokenService statefulTokenService; + TimeseriesAspectService timeseriesAspectService; + EntityRegistry entityRegistry; + SecretService secretService; + NativeUserService nativeUserService; + IngestionConfiguration ingestionConfiguration; + AuthenticationConfiguration authenticationConfiguration; + AuthorizationConfiguration authorizationConfiguration; + GitVersion gitVersion; + TimelineService timelineService; + boolean supportsImpactAnalysis; + VisualConfiguration visualConfiguration; + TelemetryConfiguration telemetryConfiguration; + TestsConfiguration testsConfiguration; + DataHubConfiguration datahubConfiguration; + ViewsConfiguration viewsConfiguration; + SiblingGraphService siblingGraphService; + GroupService groupService; + RoleService roleService; + InviteTokenService inviteTokenService; + PostService postService; + ViewService viewService; + OwnershipTypeService ownershipTypeService; + SettingsService settingsService; + LineageService lineageService; + QueryService queryService; + FeatureFlags featureFlags; + DataProductService dataProductService; - //any fork specific args should go below this line + // any fork specific args should go below this line } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java index e7ef0c402a1de5..472d9465aeee12 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java @@ -5,41 +5,42 @@ import java.util.Collection; import 
java.util.List; - /** - * An interface that allows the Core GMS GraphQL Engine to be extended without requiring - * code changes in the GmsGraphQLEngine class if new entities, relationships or resolvers - * need to be introduced. This is useful if you are maintaining a fork of DataHub and - * don't want to deal with merge conflicts. + * An interface that allows the Core GMS GraphQL Engine to be extended without requiring code + * changes in the GmsGraphQLEngine class if new entities, relationships or resolvers need to be + * introduced. This is useful if you are maintaining a fork of DataHub and don't want to deal with + * merge conflicts. */ public interface GmsGraphQLPlugin { /** * Initialization method that allows the plugin to instantiate + * * @param args */ void init(GmsGraphQLEngineArgs args); /** - * Return a list of schema files that contain graphql definitions - * that are served by this plugin + * Return a list of schema files that contain graphql definitions that are served by this plugin + * * @return */ List getSchemaFiles(); /** * Return a list of LoadableTypes that this plugin serves + * * @return */ Collection> getLoadableTypes(); /** - * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers. + * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific + * resolvers. 
+ * * @param wiringBuilder : the builder being used to configure the runtime wiring * @param baseEngine : a reference to the core engine and its graphql types */ - default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) { - - } - + default void configureExtraResolvers( + final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java index 74c4c541b972b1..f95727a1e8fd1d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static graphql.schema.idl.RuntimeWiring.*; + import com.linkedin.datahub.graphql.exception.DataHubDataFetcherExceptionHandler; import graphql.ExecutionInput; import graphql.ExecutionResult; @@ -22,152 +24,157 @@ import org.dataloader.DataLoader; import org.dataloader.DataLoaderRegistry; -import static graphql.schema.idl.RuntimeWiring.*; - /** - * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and executing - * GQL queries. - - *

This class provides a {@link Builder} builder for constructing {@link GraphQL} instances provided one or more - * schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. + * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and + * executing GQL queries. + * + *

This class provides a {@link Builder} builder for constructing {@link GraphQL} instances + * provided one or more schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}. * - *

In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set of variables. + *

In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set + * of variables. */ public class GraphQLEngine { - private final GraphQL _graphQL; - private final Map>> _dataLoaderSuppliers; + private final GraphQL _graphQL; + private final Map>> _dataLoaderSuppliers; - private GraphQLEngine(@Nonnull final List schemas, - @Nonnull final RuntimeWiring runtimeWiring, - @Nonnull final Map>> dataLoaderSuppliers) { + private GraphQLEngine( + @Nonnull final List schemas, + @Nonnull final RuntimeWiring runtimeWiring, + @Nonnull final Map>> dataLoaderSuppliers) { - _dataLoaderSuppliers = dataLoaderSuppliers; + _dataLoaderSuppliers = dataLoaderSuppliers; - /* - * Parse schema - */ - SchemaParser schemaParser = new SchemaParser(); - TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); - schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); + /* + * Parse schema + */ + SchemaParser schemaParser = new SchemaParser(); + TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry(); + schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema))); - /* - * Configure resolvers (data fetchers) - */ - SchemaGenerator schemaGenerator = new SchemaGenerator(); - GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); + /* + * Configure resolvers (data fetchers) + */ + SchemaGenerator schemaGenerator = new SchemaGenerator(); + GraphQLSchema graphQLSchema = + schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring); - /* - * Instantiate engine - */ - _graphQL = new GraphQL.Builder(graphQLSchema) + /* + * Instantiate engine + */ + _graphQL = + new GraphQL.Builder(graphQLSchema) .defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler()) .instrumentation(new TracingInstrumentation()) .build(); - } + } + + public ExecutionResult execute( + @Nonnull final String query, + @Nullable 
final Map variables, + @Nonnull final QueryContext context) { + /* + * Init DataLoaderRegistry - should be created for each request. + */ + DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - public ExecutionResult execute(@Nonnull final String query, - @Nullable final Map variables, - @Nonnull final QueryContext context) { - /* - * Init DataLoaderRegistry - should be created for each request. - */ - DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context); - - /* - * Construct execution input - */ - ExecutionInput executionInput = ExecutionInput.newExecutionInput() + /* + * Construct execution input + */ + ExecutionInput executionInput = + ExecutionInput.newExecutionInput() .query(query) .variables(variables) .dataLoaderRegistry(register) .context(context) .build(); - /* - * Execute GraphQL Query - */ - return _graphQL.execute(executionInput); - } + /* + * Execute GraphQL Query + */ + return _graphQL.execute(executionInput); + } + + public GraphQL getGraphQL() { + return _graphQL; + } + + public static Builder builder() { + return new Builder(); + } + + /** Used to construct a {@link GraphQLEngine}. */ + public static class Builder { - public GraphQL getGraphQL() { - return _graphQL; + private final List _schemas = new ArrayList<>(); + private final Map>> _loaderSuppliers = + new HashMap<>(); + private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); + + /** + * Used to add a schema file containing the GQL types resolved by the engine. + * + *

If multiple files are provided, their schemas will be merged together. + */ + public Builder addSchema(final String schema) { + _schemas.add(schema); + return this; } - public static Builder builder() { - return new Builder(); + /** + * Used to register a {@link DataLoader} to be used within the configured resolvers. + * + *

The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} + * when invoked. + * + *

If multiple loaders are registered with the name, the latter will override the former. + */ + public Builder addDataLoader( + final String name, final Function> dataLoaderSupplier) { + _loaderSuppliers.put(name, dataLoaderSupplier); + return this; } /** - * Used to construct a {@link GraphQLEngine}. + * Used to register multiple {@link DataLoader}s for use within the configured resolvers. + * + *

The included {@link Supplier} provided is expected to return a new instance of {@link + * DataLoader} when invoked. + * + *

If multiple loaders are registered with the name, the latter will override the former. */ - public static class Builder { - - private final List _schemas = new ArrayList<>(); - private final Map>> _loaderSuppliers = new HashMap<>(); - private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring(); - - /** - * Used to add a schema file containing the GQL types resolved by the engine. - * - * If multiple files are provided, their schemas will be merged together. - */ - public Builder addSchema(final String schema) { - _schemas.add(schema); - return this; - } - - /** - * Used to register a {@link DataLoader} to be used within the configured resolvers. - * - * The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoader(final String name, final Function> dataLoaderSupplier) { - _loaderSuppliers.put(name, dataLoaderSupplier); - return this; - } - - /** - * Used to register multiple {@link DataLoader}s for use within the configured resolvers. - * - * The included {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked. - * - * If multiple loaders are registered with the name, the latter will override the former. - */ - public Builder addDataLoaders(Map>> dataLoaderSuppliers) { - _loaderSuppliers.putAll(dataLoaderSuppliers); - return this; - } - - /** - * Used to configure the runtime wiring (data fetchers & type resolvers) - * used in resolving the Graph QL schema. - * - * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register any required - * data + type resolvers. - */ - public Builder configureRuntimeWiring(final Consumer builderFunc) { - builderFunc.accept(_runtimeWiringBuilder); - return this; - } - - /** - * Builds a {@link GraphQLEngine}. 
- */ - public GraphQLEngine build() { - return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); - } + public Builder addDataLoaders( + Map>> dataLoaderSuppliers) { + _loaderSuppliers.putAll(dataLoaderSuppliers); + return this; } - private DataLoaderRegistry createDataLoaderRegistry(final Map>> dataLoaderSuppliers, - final QueryContext context) { - final DataLoaderRegistry registry = new DataLoaderRegistry(); - for (String key : dataLoaderSuppliers.keySet()) { - registry.register(key, dataLoaderSuppliers.get(key).apply(context)); - } - return registry; + /** + * Used to configure the runtime wiring (data fetchers & type resolvers) used in resolving the + * Graph QL schema. + * + *

The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register + * any required data + type resolvers. + */ + public Builder configureRuntimeWiring(final Consumer builderFunc) { + builderFunc.accept(_runtimeWiringBuilder); + return this; } + /** Builds a {@link GraphQLEngine}. */ + public GraphQLEngine build() { + return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers); + } + } + + private DataLoaderRegistry createDataLoaderRegistry( + final Map>> dataLoaderSuppliers, + final QueryContext context) { + final DataLoaderRegistry registry = new DataLoaderRegistry(); + for (String key : dataLoaderSuppliers.keySet()) { + registry.register(key, dataLoaderSuppliers.get(key).apply(context)); + } + return registry; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java index 4803ef08fdddcf..9f110e713ed574 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java @@ -4,38 +4,25 @@ import com.datahub.authentication.Authentication; import com.datahub.plugins.auth.authorization.Authorizer; - -/** - * Provided as input to GraphQL resolvers; used to carry information about GQL request context. - */ +/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */ public interface QueryContext { - /** - * Returns true if the current actor is authenticated, false otherwise. - */ - boolean isAuthenticated(); + /** Returns true if the current actor is authenticated, false otherwise. */ + boolean isAuthenticated(); - /** - * Returns the {@link Authentication} associated with the current query context. - */ - Authentication getAuthentication(); + /** Returns the {@link Authentication} associated with the current query context. 
*/ + Authentication getAuthentication(); - /** - * Returns the current authenticated actor, null if there is none. - */ - default Actor getActor() { - return getAuthentication().getActor(); - } + /** Returns the current authenticated actor, null if there is none. */ + default Actor getActor() { + return getAuthentication().getActor(); + } - /** - * Returns the current authenticated actor, null if there is none. - */ - default String getActorUrn() { - return getActor().toUrnStr(); - } + /** Returns the current authenticated actor, null if there is none. */ + default String getActorUrn() { + return getActor().toUrnStr(); + } - /** - * Returns the authorizer used to authorize specific actions. - */ - Authorizer getAuthorizer(); + /** Returns the authorizer used to authorize specific actions. */ + Authorizer getAuthorizer(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java index df7f0884852d47..425c86ab0f0f65 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java @@ -4,7 +4,6 @@ import lombok.AllArgsConstructor; import lombok.Data; - @Data @AllArgsConstructor public class RelationshipKey { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java index d51de6652bb0ac..c3ad37ddcb2018 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java @@ -10,11 +10,7 @@ public class TimeSeriesAspectArgs { private Long count; private TimeRange timeRange; - public TimeSeriesAspectArgs( - String urn, - String aspectName, - Long count, - 
TimeRange timeRange) { + public TimeSeriesAspectArgs(String urn, String aspectName, Long count, TimeRange timeRange) { this.urn = urn; this.aspectName = aspectName; this.count = count; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java index 5f703f520bde46..c7302c9772c5ef 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java @@ -3,7 +3,6 @@ import com.linkedin.usage.UsageTimeRange; import lombok.Data; - @Data public class UsageStatsKey { private String resource; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java index b0c0436ffd891a..6f81de5f04d8fc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java @@ -8,7 +8,7 @@ public class VersionedAspectKey { private String urn; private Long version; - public VersionedAspectKey(String urn, String aspectName, Long version) { + public VersionedAspectKey(String urn, String aspectName, Long version) { this.urn = urn; this.version = version; this.aspectName = aspectName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java index a78d89e59bc7bc..22ee4d4d4845c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.codec.JacksonDataCodec; @@ -26,68 +28,84 @@ import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @AllArgsConstructor public class WeaklyTypedAspectsResolver implements DataFetcher>> { - private final EntityClient _entityClient; - private final EntityRegistry _entityRegistry; - private static final JacksonDataCodec CODEC = new JacksonDataCodec(); + private final EntityClient _entityClient; + private final EntityRegistry _entityRegistry; + private static final JacksonDataCodec CODEC = new JacksonDataCodec(); - private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { - return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); - } + private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) { + return !params.getAutoRenderOnly() || aspectSpec.isAutoRender(); + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - List results = new ArrayList<>(); + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + List results = new ArrayList<>(); - final QueryContext context = environment.getContext(); - final String urnStr = ((Entity) environment.getSource()).getUrn(); - final EntityType entityType = ((Entity) environment.getSource()).getType(); - final String entityTypeName = EntityTypeMapper.getName(entityType); - final AspectParams input = bindArgument(environment.getArgument("input"), AspectParams.class); + final QueryContext context = environment.getContext(); + final String urnStr = ((Entity) environment.getSource()).getUrn(); + final EntityType entityType = ((Entity) 
environment.getSource()).getType(); + final String entityTypeName = EntityTypeMapper.getName(entityType); + final AspectParams input = + bindArgument(environment.getArgument("input"), AspectParams.class); - EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); - entitySpec.getAspectSpecs().stream().filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)).forEach(aspectSpec -> { - try { - Urn urn = Urn.createFromString(urnStr); - RawAspect result = new RawAspect(); - EntityResponse entityResponse = - _entityClient.batchGetV2(urn.getEntityType(), Collections.singleton(urn), - Collections.singleton(aspectSpec.getName()), context.getAuthentication()).get(urn); - if (entityResponse == null || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { + EntitySpec entitySpec = _entityRegistry.getEntitySpec(entityTypeName); + entitySpec.getAspectSpecs().stream() + .filter(aspectSpec -> shouldReturnAspect(aspectSpec, input)) + .forEach( + aspectSpec -> { + try { + Urn urn = Urn.createFromString(urnStr); + RawAspect result = new RawAspect(); + EntityResponse entityResponse = + _entityClient + .batchGetV2( + urn.getEntityType(), + Collections.singleton(urn), + Collections.singleton(aspectSpec.getName()), + context.getAuthentication()) + .get(urn); + if (entityResponse == null + || !entityResponse.getAspects().containsKey(aspectSpec.getName())) { return; - } + } - DataMap resolvedAspect = entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); - if (resolvedAspect == null) { + DataMap resolvedAspect = + entityResponse.getAspects().get(aspectSpec.getName()).getValue().data(); + if (resolvedAspect == null) { return; - } + } - result.setPayload(CODEC.mapToString(resolvedAspect)); - result.setAspectName(aspectSpec.getName()); + result.setPayload(CODEC.mapToString(resolvedAspect)); + result.setAspectName(aspectSpec.getName()); - DataMap renderSpec = aspectSpec.getRenderSpec(); + DataMap renderSpec = aspectSpec.getRenderSpec(); - if 
(renderSpec != null) { + if (renderSpec != null) { AspectRenderSpec resultRenderSpec = new AspectRenderSpec(); resultRenderSpec.setDisplayType(renderSpec.getString("displayType")); resultRenderSpec.setDisplayName(renderSpec.getString("displayName")); resultRenderSpec.setKey(renderSpec.getString("key")); result.setRenderSpec(resultRenderSpec); - } + } - results.add(result); - } catch (IOException | RemoteInvocationException | URISyntaxException e) { - throw new RuntimeException("Failed to fetch aspect " + aspectSpec.getName() + " for urn " + urnStr + " ", e); - } - }); - return results; + results.add(result); + } catch (IOException | RemoteInvocationException | URISyntaxException e) { + throw new RuntimeException( + "Failed to fetch aspect " + + aspectSpec.getName() + + " for urn " + + urnStr + + " ", + e); + } + }); + return results; }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java index 7728dcae5d8eef..3bf932c4281e8d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/AnalyticsChartTypeResolver.java @@ -7,18 +7,17 @@ import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - public class AnalyticsChartTypeResolver implements TypeResolver { - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TimeSeriesChart) { - return env.getSchema().getObjectType("TimeSeriesChart"); - } else if (env.getObject() instanceof BarChart) { - return env.getSchema().getObjectType("BarChart"); - } else if (env.getObject() instanceof TableChart) { - return env.getSchema().getObjectType("TableChart"); - } else { - throw new 
RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof TimeSeriesChart) { + return env.getSchema().getObjectType("TimeSeriesChart"); + } else if (env.getObject() instanceof BarChart) { + return env.getSchema().getObjectType("BarChart"); + } else if (env.getObject() instanceof TableChart) { + return env.getSchema().getObjectType("TableChart"); + } else { + throw new RuntimeException("Unrecognized object type provided to AnalyticsChart resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java index b8a5dd1121a109..6ba3c5090f1c40 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetChartsResolver.java @@ -27,15 +27,11 @@ import java.util.Collections; import java.util.List; import java.util.Optional; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. 
*/ @Slf4j @RequiredArgsConstructor public final class GetChartsResolver implements DataFetcher> { @@ -47,15 +43,17 @@ public final class GetChartsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { Authentication authentication = ResolverUtils.getAuthentication(environment); try { - return ImmutableList.of(AnalyticsChartGroup.builder() - .setGroupId("DataHubUsageAnalytics") - .setTitle("DataHub Usage Analytics") - .setCharts(getProductAnalyticsCharts(authentication)) - .build(), AnalyticsChartGroup.builder() - .setGroupId("GlobalMetadataAnalytics") - .setTitle("Data Landscape Summary") - .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) - .build()); + return ImmutableList.of( + AnalyticsChartGroup.builder() + .setGroupId("DataHubUsageAnalytics") + .setTitle("DataHub Usage Analytics") + .setCharts(getProductAnalyticsCharts(authentication)) + .build(), + AnalyticsChartGroup.builder() + .setGroupId("GlobalMetadataAnalytics") + .setTitle("Data Landscape Summary") + .setCharts(getGlobalMetadataAnalyticsCharts(authentication)) + .build()); } catch (Exception e) { log.error("Failed to retrieve analytics charts!", e); return Collections.emptyList(); // Simply return nothing. 
@@ -63,85 +61,116 @@ public final List get(DataFetchingEnvironment environment) } private TimeSeriesChart getActiveUsersTimeSeriesChart( - final DateTime beginning, - final DateTime end, - final String title, - final DateInterval interval - ) { + final DateTime beginning, + final DateTime end, + final String title, + final DateInterval interval) { final DateRange dateRange = - new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); + new DateRange(String.valueOf(beginning.getMillis()), String.valueOf(end.getMillis())); final List timeSeriesLines = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), dateRange, interval, - Optional.empty(), ImmutableMap.of(), Collections.emptyMap(), Optional.of("browserId")); + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + dateRange, + interval, + Optional.empty(), + ImmutableMap.of(), + Collections.emptyMap(), + Optional.of("browserId")); return TimeSeriesChart.builder() - .setTitle(title) - .setDateRange(dateRange) - .setInterval(interval) - .setLines(timeSeriesLines) - .build(); + .setTitle(title) + .setDateRange(dateRange) + .setInterval(interval) + .setLines(timeSeriesLines) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ - private List getProductAnalyticsCharts(Authentication authentication) throws Exception { + /** TODO: Config Driven Charts Instead of Hardcoded. 
*/ + private List getProductAnalyticsCharts(Authentication authentication) + throws Exception { final List charts = new ArrayList<>(); DateUtil dateUtil = new DateUtil(); final DateTime startOfNextWeek = dateUtil.getStartOfNextWeek(); + final DateTime startOfThisMonth = dateUtil.getStartOfThisMonth(); final DateTime startOfNextMonth = dateUtil.getStartOfNextMonth(); final DateRange trailingWeekDateRange = dateUtil.getTrailingWeekDateRange(); - charts.add(getActiveUsersTimeSeriesChart( + charts.add( + getActiveUsersTimeSeriesChart( startOfNextWeek.minusWeeks(10), startOfNextWeek.minusMillis(1), "Weekly Active Users", - DateInterval.WEEK - )); - charts.add(getActiveUsersTimeSeriesChart( + DateInterval.WEEK)); + charts.add( + getActiveUsersTimeSeriesChart( startOfNextMonth.minusMonths(12), - startOfNextMonth.minusMillis(1), + startOfThisMonth.minusMillis(1), "Monthly Active Users", - DateInterval.MONTH - )); + DateInterval.MONTH)); String searchesTitle = "Searches Last Week"; DateInterval dailyInterval = DateInterval.DAY; String searchEventType = "SearchEvent"; final List searchesTimeseries = - _analyticsService.getTimeseriesChart(_analyticsService.getUsageIndexName(), trailingWeekDateRange, dailyInterval, - Optional.empty(), ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), + _analyticsService.getTimeseriesChart( + _analyticsService.getUsageIndexName(), + trailingWeekDateRange, + dailyInterval, + Optional.empty(), + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), Optional.empty()); - charts.add(TimeSeriesChart.builder() - .setTitle(searchesTitle) - .setDateRange(trailingWeekDateRange) - .setInterval(dailyInterval) - .setLines(searchesTimeseries) - .build()); + charts.add( + TimeSeriesChart.builder() + .setTitle(searchesTitle) + .setDateRange(trailingWeekDateRange) + .setInterval(dailyInterval) + .setLines(searchesTimeseries) + .build()); final String topSearchTitle = "Top Search Queries"; final 
List columns = ImmutableList.of("Query", "Count"); final List topSearchQueries = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "query.keyword", ImmutableMap.of("type", ImmutableList.of(searchEventType)), Collections.emptyMap(), - Optional.empty(), 10, AnalyticsUtil::buildCellWithSearchLandingPage); - charts.add(TableChart.builder().setTitle(topSearchTitle).setColumns(columns).setRows(topSearchQueries).build()); + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "query.keyword", + ImmutableMap.of("type", ImmutableList.of(searchEventType)), + Collections.emptyMap(), + Optional.empty(), + 10, + AnalyticsUtil::buildCellWithSearchLandingPage); + charts.add( + TableChart.builder() + .setTitle(topSearchTitle) + .setColumns(columns) + .setRows(topSearchQueries) + .build()); final String sectionViewsTitle = "Section Views across Entity Types"; final List sectionViewsPerEntityType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "section.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), Collections.emptyMap(), - Optional.empty(), true); - charts.add(BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); + ImmutableMap.of("type", ImmutableList.of("EntitySectionViewEvent")), + Collections.emptyMap(), + Optional.empty(), + true); + charts.add( + BarChart.builder().setTitle(sectionViewsTitle).setBars(sectionViewsPerEntityType).build()); final String actionsByTypeTitle = "Actions by Entity Type"; final List eventsByEventType = - _analyticsService.getBarChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), + _analyticsService.getBarChart( + 
_analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), ImmutableList.of("entityType.keyword", "actionType.keyword"), - ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), Collections.emptyMap(), Optional.empty(), + ImmutableMap.of("type", ImmutableList.of("EntityActionEvent")), + Collections.emptyMap(), + Optional.empty(), true); charts.add(BarChart.builder().setTitle(actionsByTypeTitle).setBars(eventsByEventType).build()); @@ -149,61 +178,128 @@ private List getProductAnalyticsCharts(Authentication authentica final List columns5 = ImmutableList.of("Dataset", "#Views"); final List topViewedDatasets = - _analyticsService.getTopNTableChart(_analyticsService.getUsageIndexName(), Optional.of(trailingWeekDateRange), - "entityUrn.keyword", ImmutableMap.of("type", ImmutableList.of("EntityViewEvent"), "entityType.keyword", - ImmutableList.of(EntityType.DATASET.name())), Collections.emptyMap(), Optional.empty(), 10, + _analyticsService.getTopNTableChart( + _analyticsService.getUsageIndexName(), + Optional.of(trailingWeekDateRange), + "entityUrn.keyword", + ImmutableMap.of( + "type", + ImmutableList.of("EntityViewEvent"), + "entityType.keyword", + ImmutableList.of(EntityType.DATASET.name())), + Collections.emptyMap(), + Optional.empty(), + 10, AnalyticsUtil::buildCellWithEntityLandingPage); - AnalyticsUtil.hydrateDisplayNameForTable(_entityClient, topViewedDatasets, Constants.DATASET_ENTITY_NAME, - ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), AnalyticsUtil::getDatasetName, authentication); - charts.add(TableChart.builder().setTitle(topViewedTitle).setColumns(columns5).setRows(topViewedDatasets).build()); + AnalyticsUtil.hydrateDisplayNameForTable( + _entityClient, + topViewedDatasets, + Constants.DATASET_ENTITY_NAME, + ImmutableSet.of(Constants.DATASET_KEY_ASPECT_NAME), + AnalyticsUtil::getDatasetName, + authentication); + charts.add( + TableChart.builder() + .setTitle(topViewedTitle) + .setColumns(columns5) + 
.setRows(topViewedDatasets) + .build()); return charts; } - private List getGlobalMetadataAnalyticsCharts(Authentication authentication) throws Exception { + private List getGlobalMetadataAnalyticsCharts(Authentication authentication) + throws Exception { final List charts = new ArrayList<>(); // Chart 1: Entities per domain final List entitiesPerDomain = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("domains.keyword", "platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerDomain, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); - AnalyticsUtil.hydrateDisplayNameForSegments(_entityClient, entitiesPerDomain, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("domains.keyword", "platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerDomain, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); + AnalyticsUtil.hydrateDisplayNameForSegments( + _entityClient, + entitiesPerDomain, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerDomain.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Domain").setBars(entitiesPerDomain).build()); + charts.add( + BarChart.builder().setTitle("Entities per 
Domain").setBars(entitiesPerDomain).build()); } // Chart 2: Entities per platform final List entitiesPerPlatform = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("platform.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerPlatform, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("platform.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerPlatform, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); if (!entitiesPerPlatform.isEmpty()) { - charts.add(BarChart.builder().setTitle("Entities per Platform").setBars(entitiesPerPlatform).build()); + charts.add( + BarChart.builder() + .setTitle("Entities per Platform") + .setBars(entitiesPerPlatform) + .build()); } // Chart 3: Entities per term final List entitiesPerTerm = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("glossaryTerms.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, entitiesPerTerm, Constants.GLOSSARY_TERM_ENTITY_NAME, - ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + 
ImmutableList.of("glossaryTerms.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + entitiesPerTerm, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); if (!entitiesPerTerm.isEmpty()) { charts.add(BarChart.builder().setTitle("Entities per Term").setBars(entitiesPerTerm).build()); } // Chart 4: Entities per fabric type final List entitiesPerEnv = - _analyticsService.getBarChart(_analyticsService.getAllEntityIndexName(), Optional.empty(), - ImmutableList.of("origin.keyword"), Collections.emptyMap(), - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty(), false); + _analyticsService.getBarChart( + _analyticsService.getAllEntityIndexName(), + Optional.empty(), + ImmutableList.of("origin.keyword"), + Collections.emptyMap(), + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty(), + false); if (entitiesPerEnv.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); + charts.add( + BarChart.builder().setTitle("Entities per Environment").setBars(entitiesPerEnv).build()); } return charts; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java index c631a13b0bcb6d..7000ab7adff5d5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetHighlightsResolver.java @@ -14,15 +14,11 @@ import java.util.Map; import java.util.Optional; import java.util.function.Function; - import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.joda.time.DateTime; - -/** - * Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Highlights to be rendered of the Analytics screen of the DataHub application. */ @RequiredArgsConstructor @Slf4j public final class GetHighlightsResolver implements DataFetcher> { @@ -40,69 +36,72 @@ public final List get(DataFetchingEnvironment environment) throws Exc } private Highlight getTimeBasedHighlight( - final String title, - final String changeString, - final DateTime endDateTime, - final Function periodStartFunc - ) { + final String title, + final String changeString, + final DateTime endDateTime, + final Function periodStartFunc) { DateTime startDate = periodStartFunc.apply(endDateTime); DateTime timeBeforeThat = periodStartFunc.apply(startDate); - DateRange dateRangeThis = new DateRange( - String.valueOf(startDate.getMillis()), - String.valueOf(endDateTime.getMillis()) - ); - DateRange dateRangeLast = new DateRange( - String.valueOf(timeBeforeThat.getMillis()), - String.valueOf(startDate.getMillis()) - ); - - int activeUsersThisRange = _analyticsService.getHighlights( + DateRange dateRangeThis = + new DateRange( + String.valueOf(startDate.getMillis()), String.valueOf(endDateTime.getMillis())); + DateRange dateRangeLast = + new DateRange( + String.valueOf(timeBeforeThat.getMillis()), String.valueOf(startDate.getMillis())); + + int activeUsersThisRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeThis), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); - int activeUsersLastRange = _analyticsService.getHighlights( + Optional.of("browserId")); + int activeUsersLastRange = + _analyticsService.getHighlights( _analyticsService.getUsageIndexName(), Optional.of(dateRangeLast), ImmutableMap.of(), ImmutableMap.of(), - Optional.of("browserId") - ); + 
Optional.of("browserId")); String bodyText = ""; if (activeUsersLastRange > 0) { - double percentChange = (double) (activeUsersThisRange - activeUsersLastRange) - / (double) activeUsersLastRange * 100; + double percentChange = + (double) (activeUsersThisRange - activeUsersLastRange) + / (double) activeUsersLastRange + * 100; String directionChange = percentChange > 0 ? "increase" : "decrease"; - bodyText = Double.isInfinite(percentChange) ? "" + bodyText = + Double.isInfinite(percentChange) + ? "" : String.format(changeString, percentChange, directionChange); } - return Highlight.builder().setTitle(title).setValue(activeUsersThisRange).setBody(bodyText).build(); + return Highlight.builder() + .setTitle(title) + .setValue(activeUsersThisRange) + .setBody(bodyText) + .build(); } - /** - * TODO: Config Driven Charts Instead of Hardcoded. - */ + /** TODO: Config Driven Charts Instead of Hardcoded. */ private List getHighlights() { final List highlights = new ArrayList<>(); DateTime endDate = DateTime.now(); - highlights.add(getTimeBasedHighlight( + highlights.add( + getTimeBasedHighlight( "Weekly Active Users", "%.2f%% %s from last week", endDate, - (date) -> date.minusWeeks(1) - )); - highlights.add(getTimeBasedHighlight( + (date) -> date.minusWeeks(1))); + highlights.add( + getTimeBasedHighlight( "Monthly Active Users", "%.2f%% %s from last month", endDate, - (date) -> date.minusMonths(1) - )); + (date) -> date.minusMonths(1))); // Entity metdata statistics getEntityMetadataStats("Datasets", EntityType.DATASET).ifPresent(highlights::add); @@ -121,10 +120,13 @@ private Optional getEntityMetadataStats(String title, EntityType enti if (numEntities == 0) { return Optional.empty(); } - int numEntitiesWithOwners = getNumEntitiesFiltered(index, ImmutableMap.of("hasOwners", ImmutableList.of("true"))); - int numEntitiesWithTags = getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); + int numEntitiesWithOwners = + getNumEntitiesFiltered(index, 
ImmutableMap.of("hasOwners", ImmutableList.of("true"))); + int numEntitiesWithTags = + getNumEntitiesFiltered(index, ImmutableMap.of("hasTags", ImmutableList.of("true"))); int numEntitiesWithGlossaryTerms = - getNumEntitiesFiltered(index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); + getNumEntitiesFiltered( + index, ImmutableMap.of("hasGlossaryTerms", ImmutableList.of("true"))); int numEntitiesWithDescription = getNumEntitiesFiltered(index, ImmutableMap.of("hasDescription", ImmutableList.of("true"))); @@ -137,22 +139,36 @@ private Optional getEntityMetadataStats(String title, EntityType enti if (entityType == EntityType.DOMAIN) { // Don't show percent with domain when asking for stats regarding domains bodyText = - String.format("%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription); + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription); } else { int numEntitiesWithDomains = getNumEntitiesFiltered(index, ImmutableMap.of("hasDomain", ImmutableList.of("true"))); double percentWithDomains = 100.0 * numEntitiesWithDomains / numEntities; - bodyText = String.format( - "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", - percentWithOwners, percentWithTags, percentWithGlossaryTerms, percentWithDescription, percentWithDomains); + bodyText = + String.format( + "%.2f%% have owners, %.2f%% have tags, %.2f%% have glossary terms, %.2f%% have description, %.2f%% have domain assigned!", + percentWithOwners, + percentWithTags, + percentWithGlossaryTerms, + percentWithDescription, + percentWithDomains); } } - return Optional.of(Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); + 
return Optional.of( + Highlight.builder().setTitle(title).setValue(numEntities).setBody(bodyText).build()); } private int getNumEntitiesFiltered(String index, Map> filters) { - return _analyticsService.getHighlights(index, Optional.empty(), filters, - ImmutableMap.of("removed", ImmutableList.of("true")), Optional.empty()); + return _analyticsService.getHighlights( + index, + Optional.empty(), + filters, + ImmutableMap.of("removed", ImmutableList.of("true")), + Optional.empty()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java index f61c2eb77739bf..31a8359f8f0e3a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/GetMetadataAnalyticsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.analytics.resolver; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -30,12 +32,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - -/** - * Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. - */ +/** Retrieves the Charts to be rendered of the Analytics screen of the DataHub application. 
*/ @RequiredArgsConstructor @Slf4j public final class GetMetadataAnalyticsResolver implements DataFetcher> { @@ -45,7 +42,8 @@ public final class GetMetadataAnalyticsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final Authentication authentication = ResolverUtils.getAuthentication(environment); - final MetadataAnalyticsInput input = bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); + final MetadataAnalyticsInput input = + bindArgument(environment.getArgument("input"), MetadataAnalyticsInput.class); try { final AnalyticsChartGroup group = new AnalyticsChartGroup(); @@ -59,7 +57,8 @@ public final List get(DataFetchingEnvironment environment) } } - private List getCharts(MetadataAnalyticsInput input, Authentication authentication) throws Exception { + private List getCharts( + MetadataAnalyticsInput input, Authentication authentication) throws Exception { final List charts = new ArrayList<>(); List entities = Collections.emptyList(); @@ -77,48 +76,76 @@ private List getCharts(MetadataAnalyticsInput input, Authenticat filter = QueryUtils.newFilter("domains.keyword", input.getDomain()); } - SearchResult searchResult = _entityClient.searchAcrossEntities(entities, query, filter, 0, 0, - null, null, authentication); + SearchResult searchResult = + _entityClient.searchAcrossEntities( + entities, query, filter, 0, 0, null, null, authentication); - List aggregationMetadataList = searchResult.getMetadata().getAggregations(); + List aggregationMetadataList = + searchResult.getMetadata().getAggregations(); Optional domainAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("domains")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("domains")) + .findFirst(); if (StringUtils.isEmpty(input.getDomain()) && domainAggregation.isPresent()) { List domainChart = buildBarChart(domainAggregation.get()); - 
AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, domainChart, Constants.DOMAIN_ENTITY_NAME, - ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), AnalyticsUtil::getDomainName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + domainChart, + Constants.DOMAIN_ENTITY_NAME, + ImmutableSet.of(Constants.DOMAIN_PROPERTIES_ASPECT_NAME), + AnalyticsUtil::getDomainName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Domain").setBars(domainChart).build()); } Optional platformAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("platform")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("platform")) + .findFirst(); if (platformAggregation.isPresent()) { List platformChart = buildBarChart(platformAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, platformChart, Constants.DATA_PLATFORM_ENTITY_NAME, - ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), AnalyticsUtil::getPlatformName, authentication); - charts.add(BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + platformChart, + Constants.DATA_PLATFORM_ENTITY_NAME, + ImmutableSet.of(Constants.DATA_PLATFORM_INFO_ASPECT_NAME), + AnalyticsUtil::getPlatformName, + authentication); + charts.add( + BarChart.builder().setTitle("Entities by Platform").setBars(platformChart).build()); } Optional termAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("glossaryTerms")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("glossaryTerms")) + .findFirst(); if (termAggregation.isPresent()) { List termChart = buildBarChart(termAggregation.get()); - AnalyticsUtil.hydrateDisplayNameForBars(_entityClient, termChart, Constants.GLOSSARY_TERM_ENTITY_NAME, - 
ImmutableSet.of(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), AnalyticsUtil::getTermName, authentication); + AnalyticsUtil.hydrateDisplayNameForBars( + _entityClient, + termChart, + Constants.GLOSSARY_TERM_ENTITY_NAME, + ImmutableSet.of( + Constants.GLOSSARY_TERM_KEY_ASPECT_NAME, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + AnalyticsUtil::getTermName, + authentication); charts.add(BarChart.builder().setTitle("Entities by Term").setBars(termChart).build()); } Optional envAggregation = - aggregationMetadataList.stream().filter(metadata -> metadata.getName().equals("origin")).findFirst(); + aggregationMetadataList.stream() + .filter(metadata -> metadata.getName().equals("origin")) + .findFirst(); if (envAggregation.isPresent()) { List termChart = buildBarChart(envAggregation.get()); if (termChart.size() > 1) { - charts.add(BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); + charts.add( + BarChart.builder().setTitle("Entities by Environment").setBars(termChart).build()); } } @@ -126,16 +153,20 @@ private List getCharts(MetadataAnalyticsInput input, Authenticat } private List buildBarChart(AggregationMetadata aggregation) { - return aggregation.getAggregations() - .entrySet() - .stream() + return aggregation.getAggregations().entrySet().stream() .sorted(Collections.reverseOrder(Map.Entry.comparingByValue())) .limit(10) - .map(entry -> NamedBar.builder() - .setName(entry.getKey()) - .setSegments(ImmutableList.of( - BarSegment.builder().setLabel("#Entities").setValue(entry.getValue().intValue()).build())) - .build()) + .map( + entry -> + NamedBar.builder() + .setName(entry.getKey()) + .setSegments( + ImmutableList.of( + BarSegment.builder() + .setLabel("#Entities") + .setValue(entry.getValue().intValue()) + .build())) + .build()) .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java index 8e3bffc9ccf08d..c7f5c0bbc63eb5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/resolver/IsAnalyticsEnabledResolver.java @@ -3,20 +3,17 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -/** - * Returns true if analytics feature flag is enabled, false otherwise. - */ +/** Returns true if analytics feature flag is enabled, false otherwise. */ public class IsAnalyticsEnabledResolver implements DataFetcher { private final Boolean _isAnalyticsEnabled; public IsAnalyticsEnabledResolver(final Boolean isAnalyticsEnabled) { - _isAnalyticsEnabled = isAnalyticsEnabled; + _isAnalyticsEnabled = isAnalyticsEnabled; } @Override public final Boolean get(DataFetchingEnvironment environment) throws Exception { - return _isAnalyticsEnabled; + return _isAnalyticsEnabled; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java index 4135a7b0da1482..03333bda05f61f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsService.java @@ -40,7 +40,6 @@ import org.opensearch.search.aggregations.metrics.Cardinality; import org.opensearch.search.builder.SearchSourceBuilder; - @Slf4j @RequiredArgsConstructor public class AnalyticsService { @@ -72,25 +71,35 @@ public String getUsageIndexName() { return _indexConvention.getIndexName(DATAHUB_USAGE_EVENT_INDEX); } - public List getTimeseriesChart(String indexName, DateRange dateRange, DateInterval granularity, 
+ public List getTimeseriesChart( + String indexName, + DateRange dateRange, + DateInterval granularity, Optional dimension, // Length 1 for now - Map> filters, Map> mustNotFilters, Optional uniqueOn) { + Map> filters, + Map> mustNotFilters, + Optional uniqueOn) { log.debug( - String.format("Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", - indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + String.format("filters: %s, uniqueOn: %s", filters, - uniqueOn)); - - AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); - - AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(DATE_HISTOGRAM) - .field("timestamp") - .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); + String.format( + "Invoked getTimeseriesChart with indexName: %s, dateRange: %s to %s, granularity: %s, dimension: %s,", + indexName, dateRange.getStart(), dateRange.getEnd(), granularity, dimension) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + + AggregationBuilder filteredAgg = + getFilteredAggregation(filters, mustNotFilters, Optional.of(dateRange)); + + AggregationBuilder dateHistogram = + AggregationBuilders.dateHistogram(DATE_HISTOGRAM) + .field("timestamp") + .calendarInterval(new DateHistogramInterval(granularity.name().toLowerCase())); uniqueOn.ifPresent(s -> dateHistogram.subAggregation(getUniqueQuery(s))); if (dimension.isPresent()) { filteredAgg.subAggregation( - AggregationBuilders.terms(DIMENSION).field(dimension.get()).subAggregation(dateHistogram)); + AggregationBuilders.terms(DIMENSION) + .field(dimension.get()) + .subAggregation(dateHistogram)); } else { filteredAgg.subAggregation(dateHistogram); } @@ -99,39 +108,55 @@ public List getTimeseriesChart(String indexName, DateRange dateRange, Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { if 
(dimension.isPresent()) { - return aggregationResult.get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedLine(bucket.getKeyAsString(), - extractPointsFromAggregations(bucket.getAggregations(), uniqueOn.isPresent()))) + return aggregationResult.get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedLine( + bucket.getKeyAsString(), + extractPointsFromAggregations( + bucket.getAggregations(), uniqueOn.isPresent()))) .collect(Collectors.toList()); } else { return ImmutableList.of( - new NamedLine("total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); + new NamedLine( + "total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent()))); } } catch (Exception e) { - log.error(String.format("Caught exception while getting time series chart: %s", e.getMessage())); + log.error( + String.format("Caught exception while getting time series chart: %s", e.getMessage())); return ImmutableList.of(); } } private int extractCount(MultiBucketsAggregation.Bucket bucket, boolean didUnique) { - return didUnique ? (int) bucket.getAggregations().get(UNIQUE).getValue() : (int) bucket.getDocCount(); + return didUnique + ? 
(int) bucket.getAggregations().get(UNIQUE).getValue() + : (int) bucket.getDocCount(); } - private List extractPointsFromAggregations(Aggregations aggregations, boolean didUnique) { - return aggregations.get(DATE_HISTOGRAM).getBuckets() - .stream() - .map(bucket -> new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) + private List extractPointsFromAggregations( + Aggregations aggregations, boolean didUnique) { + return aggregations.get(DATE_HISTOGRAM).getBuckets().stream() + .map( + bucket -> + new NumericDataPoint(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } - public List getBarChart(String indexName, Optional dateRange, List dimensions, + public List getBarChart( + String indexName, + Optional dateRange, + List dimensions, // Length 1 or 2 - Map> filters, Map> mustNotFilters, Optional uniqueOn, + Map> filters, + Map> mustNotFilters, + Optional uniqueOn, boolean showMissing) { log.debug( - String.format("Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", indexName, dateRange, - dimensions) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,", + indexName, dateRange, dimensions) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); assert (dimensions.size() == 1 || dimensions.size() == 2); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); @@ -142,7 +167,8 @@ public List getBarChart(String indexName, Optional dateRang } if (dimensions.size() == 2) { - TermsAggregationBuilder secondTermAgg = AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); + TermsAggregationBuilder secondTermAgg = + AggregationBuilders.terms(SECOND_DIMENSION).field(dimensions.get(1)); if (showMissing) { secondTermAgg.missing(NA); } @@ -161,14 +187,24 @@ public List getBarChart(String indexName, Optional dateRang List 
barSegments = extractBarSegmentsFromAggregations(aggregationResult, DIMENSION, uniqueOn.isPresent()); return barSegments.stream() - .map(segment -> new NamedBar(segment.getLabel(), - ImmutableList.of(BarSegment.builder().setLabel("Count").setValue(segment.getValue()).build()))) + .map( + segment -> + new NamedBar( + segment.getLabel(), + ImmutableList.of( + BarSegment.builder() + .setLabel("Count") + .setValue(segment.getValue()) + .build()))) .collect(Collectors.toList()); } else { - return aggregationResult.get(DIMENSION).getBuckets() - .stream() - .map(bucket -> new NamedBar(bucket.getKeyAsString(), - extractBarSegmentsFromAggregations(bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) + return aggregationResult.get(DIMENSION).getBuckets().stream() + .map( + bucket -> + new NamedBar( + bucket.getKeyAsString(), + extractBarSegmentsFromAggregations( + bucket.getAggregations(), SECOND_DIMENSION, uniqueOn.isPresent()))) .collect(Collectors.toList()); } } catch (Exception e) { @@ -177,31 +213,41 @@ public List getBarChart(String indexName, Optional dateRang } } - private List extractBarSegmentsFromAggregations(Aggregations aggregations, String aggregationKey, - boolean didUnique) { - return aggregations.get(aggregationKey).getBuckets() - .stream() + private List extractBarSegmentsFromAggregations( + Aggregations aggregations, String aggregationKey, boolean didUnique) { + return aggregations.get(aggregationKey).getBuckets().stream() .map(bucket -> new BarSegment(bucket.getKeyAsString(), extractCount(bucket, didUnique))) .collect(Collectors.toList()); } public Row buildRow(String groupByValue, Function groupByValueToCell, int count) { List values = ImmutableList.of(groupByValue, String.valueOf(count)); - List cells = ImmutableList.of(groupByValueToCell.apply(groupByValue), - Cell.builder().setValue(String.valueOf(count)).build()); + List cells = + ImmutableList.of( + groupByValueToCell.apply(groupByValue), + 
Cell.builder().setValue(String.valueOf(count)).build()); return new Row(values, cells); } - public List getTopNTableChart(String indexName, Optional dateRange, String groupBy, - Map> filters, Map> mustNotFilters, Optional uniqueOn, - int maxRows, Function groupByValueToCell) { + public List getTopNTableChart( + String indexName, + Optional dateRange, + String groupBy, + Map> filters, + Map> mustNotFilters, + Optional uniqueOn, + int maxRows, + Function groupByValueToCell) { log.debug( - String.format("Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", indexName, dateRange, - groupBy) + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s", + indexName, dateRange, groupBy) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); - TermsAggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); + TermsAggregationBuilder termAgg = + AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows); if (uniqueOn.isPresent()) { termAgg.order(BucketOrder.aggregation(UNIQUE, false)); termAgg.subAggregation(getUniqueQuery(uniqueOn.get())); @@ -212,10 +258,13 @@ public List getTopNTableChart(String indexName, Optional dateRan Aggregations aggregationResult = executeAndExtract(searchRequest).getAggregations(); try { - return aggregationResult.get(DIMENSION).getBuckets() - .stream() - .map(bucket -> buildRow(bucket.getKeyAsString(), groupByValueToCell, - extractCount(bucket, uniqueOn.isPresent()))) + return aggregationResult.get(DIMENSION).getBuckets().stream() + .map( + bucket -> + buildRow( + bucket.getKeyAsString(), + groupByValueToCell, + extractCount(bucket, uniqueOn.isPresent()))) .collect(Collectors.toList()); } catch (Exception e) { log.error(String.format("Caught exception while getting top n chart: 
%s", e.getMessage())); @@ -223,11 +272,16 @@ public List getTopNTableChart(String indexName, Optional dateRan } } - public int getHighlights(String indexName, Optional dateRange, Map> filters, - Map> mustNotFilters, Optional uniqueOn) { + public int getHighlights( + String indexName, + Optional dateRange, + Map> filters, + Map> mustNotFilters, + Optional uniqueOn) { log.debug( - String.format("Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + String.format( - "filters: %s, uniqueOn: %s", filters, uniqueOn)); + String.format( + "Invoked getHighlights with indexName: %s, dateRange: %s", indexName, dateRange) + + String.format("filters: %s, uniqueOn: %s", filters, uniqueOn)); AggregationBuilder filteredAgg = getFilteredAggregation(filters, mustNotFilters, dateRange); uniqueOn.ifPresent(s -> filteredAgg.subAggregation(getUniqueQuery(s))); @@ -246,7 +300,8 @@ public int getHighlights(String indexName, Optional dateRange, Mapget(FILTERED); } catch (Exception e) { @@ -266,11 +322,14 @@ private Filter executeAndExtract(SearchRequest searchRequest) { } } - private AggregationBuilder getFilteredAggregation(Map> mustFilters, - Map> mustNotFilters, Optional dateRange) { + private AggregationBuilder getFilteredAggregation( + Map> mustFilters, + Map> mustNotFilters, + Optional dateRange) { BoolQueryBuilder filteredQuery = QueryBuilders.boolQuery(); mustFilters.forEach((key, values) -> filteredQuery.must(QueryBuilders.termsQuery(key, values))); - mustNotFilters.forEach((key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); + mustNotFilters.forEach( + (key, values) -> filteredQuery.mustNot(QueryBuilders.termsQuery(key, values))); dateRange.ifPresent(range -> filteredQuery.must(dateRangeQuery(range))); return AggregationBuilders.filter(FILTERED, filteredQuery); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java index 42f4e25c010efa..be7f4d2f0897ab 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/analytics/service/AnalyticsUtil.java @@ -31,16 +31,17 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.tuple.Pair; - @Slf4j public class AnalyticsUtil { - private AnalyticsUtil() { - } + private AnalyticsUtil() {} public static Cell buildCellWithSearchLandingPage(String query) { Cell result = new Cell(); result.setValue(query); - result.setLinkParams(LinkParams.builder().setSearchParams(SearchParams.builder().setQuery(query).build()).build()); + result.setLinkParams( + LinkParams.builder() + .setSearchParams(SearchParams.builder().setQuery(query).build()) + .build()); return result; } @@ -50,70 +51,138 @@ public static Cell buildCellWithEntityLandingPage(String urn) { try { Entity entity = UrnToEntityMapper.map(Urn.createFromString(urn)); result.setEntity(entity); - result.setLinkParams(LinkParams.builder() - .setEntityProfileParams(EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) - .build()); + result.setLinkParams( + LinkParams.builder() + .setEntityProfileParams( + EntityProfileParams.builder().setUrn(urn).setType(entity.getType()).build()) + .build()); } catch (URISyntaxException e) { log.error("Malformed urn {} in table", urn, e); } return result; } - public static void hydrateDisplayNameForBars(EntityClient entityClient, List bars, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { + public static void hydrateDisplayNameForBars( + EntityClient entityClient, + List bars, + String entityName, + Set aspectNames, + Function> extractDisplayName, + Authentication authentication) + throws Exception { Map urnToDisplayName = - 
getUrnToDisplayName(entityClient, bars.stream().map(NamedBar::getName).collect(Collectors.toList()), entityName, - aspectNames, extractDisplayName, authentication); + getUrnToDisplayName( + entityClient, + bars.stream().map(NamedBar::getName).collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.setName(urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); + bars.forEach( + namedBar -> + namedBar.setName( + urnToDisplayName.getOrDefault(namedBar.getName(), namedBar.getName()))); } - public static void hydrateDisplayNameForSegments(EntityClient entityClient, List bars, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { - Map urnToDisplayName = getUrnToDisplayName(entityClient, - bars.stream().flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)).collect(Collectors.toList()), - entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForSegments( + EntityClient entityClient, + List bars, + String entityName, + Set aspectNames, + Function> extractDisplayName, + Authentication authentication) + throws Exception { + Map urnToDisplayName = + getUrnToDisplayName( + entityClient, + bars.stream() + .flatMap(bar -> bar.getSegments().stream().map(BarSegment::getLabel)) + .collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - bars.forEach(namedBar -> namedBar.getSegments() - .forEach(segment -> segment.setLabel(urnToDisplayName.getOrDefault(segment.getLabel(), segment.getLabel())))); + bars.forEach( + namedBar -> + namedBar + .getSegments() + .forEach( + segment -> + segment.setLabel( + urnToDisplayName.getOrDefault( + segment.getLabel(), segment.getLabel())))); } - public 
static void hydrateDisplayNameForTable(EntityClient entityClient, List rows, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { - Map urnToDisplayName = getUrnToDisplayName(entityClient, rows.stream() - .flatMap(row -> row.getCells().stream().filter(cell -> cell.getEntity() != null).map(Cell::getValue)) - .collect(Collectors.toList()), entityName, aspectNames, extractDisplayName, authentication); + public static void hydrateDisplayNameForTable( + EntityClient entityClient, + List rows, + String entityName, + Set aspectNames, + Function> extractDisplayName, + Authentication authentication) + throws Exception { + Map urnToDisplayName = + getUrnToDisplayName( + entityClient, + rows.stream() + .flatMap( + row -> + row.getCells().stream() + .filter(cell -> cell.getEntity() != null) + .map(Cell::getValue)) + .collect(Collectors.toList()), + entityName, + aspectNames, + extractDisplayName, + authentication); // For each urn, try to find it's name, use the urn if not found - rows.forEach(row -> row.getCells().forEach(cell -> { - if (cell.getEntity() != null) { - cell.setValue(urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue())); - } - })); + rows.forEach( + row -> + row.getCells() + .forEach( + cell -> { + if (cell.getEntity() != null) { + cell.setValue( + urnToDisplayName.getOrDefault(cell.getValue(), cell.getValue())); + } + })); } - public static Map getUrnToDisplayName(EntityClient entityClient, List urns, String entityName, - Set aspectNames, Function> extractDisplayName, - Authentication authentication) throws Exception { - Set uniqueUrns = urns.stream().distinct().map(urnStr -> { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - return null; - } - }).filter(Objects::nonNull).collect(Collectors.toSet()); - Map aspects = entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication); - return aspects.entrySet() - .stream() - .map(entry -> 
Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue()))) + public static Map getUrnToDisplayName( + EntityClient entityClient, + List urns, + String entityName, + Set aspectNames, + Function> extractDisplayName, + Authentication authentication) + throws Exception { + Set uniqueUrns = + urns.stream() + .distinct() + .map( + urnStr -> { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toSet()); + Map aspects = + entityClient.batchGetV2(entityName, uniqueUrns, aspectNames, authentication); + return aspects.entrySet().stream() + .map( + entry -> Pair.of(entry.getKey().toString(), extractDisplayName.apply(entry.getValue()))) .filter(pair -> pair.getValue().isPresent()) .collect(Collectors.toMap(Pair::getKey, pair -> pair.getValue().get())); } public static Optional getDomainName(EntityResponse entityResponse) { - EnvelopedAspect domainProperties = entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + EnvelopedAspect domainProperties = + entityResponse.getAspects().get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (domainProperties == null) { return Optional.empty(); } @@ -126,13 +195,17 @@ public static Optional getPlatformName(EntityResponse entityResponse) { if (envelopedDataPlatformInfo == null) { return Optional.empty(); } - DataPlatformInfo dataPlatformInfo = new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data()); + DataPlatformInfo dataPlatformInfo = + new DataPlatformInfo(envelopedDataPlatformInfo.getValue().data()); return Optional.of( - dataPlatformInfo.getDisplayName() == null ? dataPlatformInfo.getName() : dataPlatformInfo.getDisplayName()); + dataPlatformInfo.getDisplayName() == null + ? 
dataPlatformInfo.getName() + : dataPlatformInfo.getDisplayName()); } public static Optional getDatasetName(EntityResponse entityResponse) { - EnvelopedAspect envelopedDatasetKey = entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); + EnvelopedAspect envelopedDatasetKey = + entityResponse.getAspects().get(Constants.DATASET_KEY_ASPECT_NAME); if (envelopedDatasetKey == null) { return Optional.empty(); } @@ -141,7 +214,8 @@ public static Optional getDatasetName(EntityResponse entityResponse) { } public static Optional getTermName(EntityResponse entityResponse) { - EnvelopedAspect envelopedTermInfo = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); + EnvelopedAspect envelopedTermInfo = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME); if (envelopedTermInfo != null) { GlossaryTermInfo glossaryTermInfo = new GlossaryTermInfo(envelopedTermInfo.getValue().data()); if (glossaryTermInfo.hasName()) { @@ -150,11 +224,13 @@ public static Optional getTermName(EntityResponse entityResponse) { } // if name is not set on GlossaryTermInfo or there is no GlossaryTermInfo - EnvelopedAspect envelopedGlossaryTermKey = entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); + EnvelopedAspect envelopedGlossaryTermKey = + entityResponse.getAspects().get(Constants.GLOSSARY_TERM_KEY_ASPECT_NAME); if (envelopedGlossaryTermKey == null) { return Optional.empty(); } - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); + GlossaryTermKey glossaryTermKey = + new GlossaryTermKey(envelopedGlossaryTermKey.getValue().data()); return Optional.of(glossaryTermKey.getName()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index 03e63c7fb472fa..6ba3777d476cb6 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -1,36 +1,37 @@ package com.linkedin.datahub.graphql.authorization; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthUtil; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; - import java.time.Clock; import java.util.List; import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class AuthorizationUtils { private static final Clock CLOCK = Clock.systemUTC(); public static AuditStamp createAuditStamp(@Nonnull QueryContext context) { - return new AuditStamp().setTime(CLOCK.millis()).setActor(UrnUtils.getUrn(context.getActorUrn())); + return new AuditStamp() + .setTime(CLOCK.millis()) + .setActor(UrnUtils.getUrn(context.getActorUrn())); } public static boolean canManageUsersAndGroups(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } public static boolean canManagePolicies(@Nonnull QueryContext context) { @@ -38,7 +39,8 @@ public static boolean 
canManagePolicies(@Nonnull QueryContext context) { } public static boolean canGeneratePersonalAccessToken(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } public static boolean canManageTokens(@Nonnull QueryContext context) { @@ -46,21 +48,20 @@ public static boolean canManageTokens(@Nonnull QueryContext context) { } /** - * Returns true if the current used is able to create Domains. This is true if the user has the 'Manage Domains' or 'Create Domains' platform privilege. + * Returns true if the current used is able to create Domains. This is true if the user has the + * 'Manage Domains' or 'Create Domains' platform privilege. */ public static boolean canCreateDomains(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_DOMAINS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageDomains(@Nonnull QueryContext context) { @@ -68,21 +69,20 @@ public static boolean canManageDomains(@Nonnull QueryContext context) { } /** - * Returns true if the current used is able to create Tags. 
This is true if the user has the 'Manage Tags' or 'Create Tags' platform privilege. + * Returns true if the current used is able to create Tags. This is true if the user has the + * 'Manage Tags' or 'Create Tags' platform privilege. */ public static boolean canCreateTags(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.CREATE_TAGS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageTags(@Nonnull QueryContext context) { @@ -90,48 +90,59 @@ public static boolean canManageTags(@Nonnull QueryContext context) { } public static boolean canDeleteEntity(@Nonnull Urn entityUrn, @Nonnull QueryContext context) { - return isAuthorized(context, Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), PoliciesConfig.DELETE_ENTITY_PRIVILEGE); + return isAuthorized( + context, + Optional.of(new EntitySpec(entityUrn.getEntityType(), entityUrn.toString())), + PoliciesConfig.DELETE_ENTITY_PRIVILEGE); } public static boolean canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); + return isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } - public static boolean 
canEditGroupMembers(@Nonnull String groupUrnStr, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); + public static boolean canEditGroupMembers( + @Nonnull String groupUrnStr, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_GROUP_MEMBERS_PRIVILEGE.getType())))); - return AuthorizationUtils.isAuthorized(context.getAuthorizer(), context.getActorUrn(), CORP_GROUP_ENTITY_NAME, - groupUrnStr, orPrivilegeGroups); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + CORP_GROUP_ENTITY_NAME, + groupUrnStr, + orPrivilegeGroups); } public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())), + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean 
canManageGlobalAnnouncements(@Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.MANAGE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - orPrivilegeGroups); + context.getAuthorizer(), context.getActorUrn(), orPrivilegeGroups); } public static boolean canManageGlobalViews(@Nonnull QueryContext context) { @@ -142,31 +153,39 @@ public static boolean canManageOwnershipTypes(@Nonnull QueryContext context) { return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOBAL_OWNERSHIP_TYPES); } - public static boolean canEditEntityQueries(@Nonnull List entityUrns, @Nonnull QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup( - ImmutableList.of(ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); - return entityUrns.stream().allMatch(entityUrn -> - isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - entityUrn.getEntityType(), - entityUrn.toString(), - orPrivilegeGroups - )); - } - - public static boolean canCreateQuery(@Nonnull List subjectUrns, @Nonnull QueryContext context) { + public static boolean canEditEntityQueries( + @Nonnull List entityUrns, @Nonnull QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType())))); + return entityUrns.stream() + .allMatch( 
+ entityUrn -> + isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + entityUrn.getEntityType(), + entityUrn.toString(), + orPrivilegeGroups)); + } + + public static boolean canCreateQuery( + @Nonnull List subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to create a query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canUpdateQuery(@Nonnull List subjectUrns, @Nonnull QueryContext context) { + public static boolean canUpdateQuery( + @Nonnull List subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to update any query. return canEditEntityQueries(subjectUrns, context); } - public static boolean canDeleteQuery(@Nonnull Urn entityUrn, @Nonnull List subjectUrns, @Nonnull QueryContext context) { + public static boolean canDeleteQuery( + @Nonnull Urn entityUrn, @Nonnull List subjectUrns, @Nonnull QueryContext context) { // Currently - you only need permission to edit an entity's queries to remove any query. 
return canEditEntityQueries(subjectUrns, context); } @@ -177,15 +196,16 @@ public static boolean isAuthorized( @Nonnull PoliciesConfig.Privilege privilege) { final Authorizer authorizer = context.getAuthorizer(); final String actor = context.getActorUrn(); - final ConjunctivePrivilegeGroup andGroup = new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); - return AuthUtil.isAuthorized(authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); + final ConjunctivePrivilegeGroup andGroup = + new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())); + return AuthUtil.isAuthorized( + authorizer, actor, resourceSpec, new DisjunctivePrivilegeGroup(ImmutableList.of(andGroup))); } public static boolean isAuthorized( @Nonnull Authorizer authorizer, @Nonnull String actor, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { return AuthUtil.isAuthorized(authorizer, actor, Optional.empty(), privilegeGroup); } @@ -194,13 +214,10 @@ public static boolean isAuthorized( @Nonnull String actor, @Nonnull String resourceType, @Nonnull String resource, - @Nonnull DisjunctivePrivilegeGroup privilegeGroup - ) { + @Nonnull DisjunctivePrivilegeGroup privilegeGroup) { final EntitySpec resourceSpec = new EntitySpec(resourceType, resource); return AuthUtil.isAuthorized(authorizer, actor, Optional.of(resourceSpec), privilegeGroup); } - private AuthorizationUtils() { } - + private AuthorizationUtils() {} } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java index a09dc8741cd29c..69e0ed0625b2fa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthenticationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. */ public class AuthenticationException extends GraphQLException { - public AuthenticationException(String message) { - super(message); - } + public AuthenticationException(String message) { + super(message); + } - public AuthenticationException(String message, Throwable cause) { - super(message, cause); - } + public AuthenticationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java index 803af09e079d1a..30568e45938c15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/AuthorizationException.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.exception; - -/** - * Exception thrown when authentication fails. - */ +/** Exception thrown when authentication fails. 
*/ public class AuthorizationException extends DataHubGraphQLException { public AuthorizationException(String message) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java index 8d3f5d5cea9eb3..746ce0cdc10fe1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubDataFetcherExceptionHandler.java @@ -12,34 +12,49 @@ @Slf4j public class DataHubDataFetcherExceptionHandler implements DataFetcherExceptionHandler { + private static final String DEFAULT_ERROR_MESSAGE = "An unknown error occurred."; + @Override - public DataFetcherExceptionHandlerResult onException(DataFetcherExceptionHandlerParameters handlerParameters) { + public DataFetcherExceptionHandlerResult onException( + DataFetcherExceptionHandlerParameters handlerParameters) { Throwable exception = handlerParameters.getException(); SourceLocation sourceLocation = handlerParameters.getSourceLocation(); ResultPath path = handlerParameters.getPath(); - log.error("Failed to execute DataFetcher", exception); - DataHubGraphQLErrorCode errorCode = DataHubGraphQLErrorCode.SERVER_ERROR; - String message = "An unknown error occurred."; + String message = DEFAULT_ERROR_MESSAGE; - // note: make sure to access the true error message via `getCause()` - if (exception.getCause() instanceof IllegalArgumentException) { + IllegalArgumentException illException = + findFirstThrowableCauseOfClass(exception, IllegalArgumentException.class); + if (illException != null) { + log.error("Failed to execute", illException); errorCode = DataHubGraphQLErrorCode.BAD_REQUEST; - message = exception.getCause().getMessage(); + message = illException.getMessage(); } - if (exception instanceof 
DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception).errorCode(); - message = exception.getMessage(); + DataHubGraphQLException graphQLException = + findFirstThrowableCauseOfClass(exception, DataHubGraphQLException.class); + if (graphQLException != null) { + log.error("Failed to execute", graphQLException); + errorCode = graphQLException.errorCode(); + message = graphQLException.getMessage(); } - if (exception.getCause() instanceof DataHubGraphQLException) { - errorCode = ((DataHubGraphQLException) exception.getCause()).errorCode(); - message = exception.getCause().getMessage(); + if (illException == null && graphQLException == null) { + log.error("Failed to execute", exception); } - DataHubGraphQLError error = new DataHubGraphQLError(message, path, sourceLocation, errorCode); return DataFetcherExceptionHandlerResult.newResult().error(error).build(); } + + T findFirstThrowableCauseOfClass(Throwable throwable, Class clazz) { + while (throwable != null) { + if (clazz.isInstance(throwable)) { + return (T) throwable; + } else { + throwable = throwable.getCause(); + } + } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java index 15c539a608cc05..f007a8b7c7adbe 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.exception; +import static graphql.Assert.*; + import graphql.ErrorType; import graphql.GraphQLError; import graphql.GraphqlErrorHelper; @@ -11,9 +13,6 @@ import java.util.List; import java.util.Map; -import static graphql.Assert.*; - - @PublicApi public class DataHubGraphQLError implements GraphQLError { @@ -23,7 +22,11 @@ public class 
DataHubGraphQLError implements GraphQLError { private final List locations; private final Map extensions; - public DataHubGraphQLError(String message, ResultPath path, SourceLocation sourceLocation, DataHubGraphQLErrorCode errorCode) { + public DataHubGraphQLError( + String message, + ResultPath path, + SourceLocation sourceLocation, + DataHubGraphQLErrorCode errorCode) { this.path = assertNotNull(path).toList(); this.errorCode = assertNotNull(errorCode); this.locations = Collections.singletonList(sourceLocation); @@ -90,4 +93,3 @@ public int hashCode() { return GraphqlErrorHelper.hashCode(this); } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java index 3d3c54e2febb29..75096a8c4148e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLException.java @@ -2,7 +2,6 @@ import graphql.GraphQLException; - public class DataHubGraphQLException extends GraphQLException { private final DataHubGraphQLErrorCode code; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java index 2ee9838af54287..87a1aebb02f2e3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/ValidationException.java @@ -2,16 +2,14 @@ import graphql.GraphQLException; -/** - * Exception thrown when an unexpected value is provided by the client. - */ +/** Exception thrown when an unexpected value is provided by the client. 
*/ public class ValidationException extends GraphQLException { - public ValidationException(String message) { - super(message); - } + public ValidationException(String message) { + super(message); + } - public ValidationException(String message, Throwable cause) { - super(message, cause); - } + public ValidationException(String message, Throwable cause) { + super(message, cause); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java index 4d6133f18df050..e74ed09849763c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/featureflags/FeatureFlags.java @@ -3,7 +3,6 @@ import com.linkedin.metadata.config.PreProcessHooks; import lombok.Data; - @Data public class FeatureFlags { private boolean showSimplifiedHomepageByDefault = false; @@ -13,6 +12,7 @@ public class FeatureFlags { private boolean readOnlyModeEnabled = false; private boolean showSearchFiltersV2 = false; private boolean showBrowseV2 = false; + private boolean platformBrowseV2 = false; private PreProcessHooks preProcessHooks; private boolean showAcrylInfo = false; private boolean showAccessManagement = false; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java index e228cb8445c02f..9faf00e0211bda 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthUtils.java @@ -1,26 +1,25 @@ package com.linkedin.datahub.graphql.resolvers; -import com.google.common.collect.ImmutableList; +import com.datahub.authorization.AuthorizationRequest; +import 
com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableList; import com.linkedin.metadata.authorization.PoliciesConfig; import java.util.List; import java.util.Optional; -import com.datahub.authorization.AuthorizationRequest; -import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; public class AuthUtils { - public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + public static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static boolean isAuthorized( - String principal, - List privilegeGroup, - Authorizer authorizer) { + String principal, List privilegeGroup, Authorizer authorizer) { for (final String privilege : privilegeGroup) { - final AuthorizationRequest request = new AuthorizationRequest(principal, privilege, Optional.empty()); + final AuthorizationRequest request = + new AuthorizationRequest(principal, privilege, Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); if (AuthorizationResult.Type.DENY.equals(result.getType())) { return false; @@ -29,6 +28,5 @@ public static boolean isAuthorized( return true; } - - private AuthUtils() { } + private AuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java index 2520b55c24e253..570ea322be7a53 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/AuthenticatedResolver.java @@ -2,29 +2,28 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthenticationException; - import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - /** - * Checks whether the user is currently authenticated & if so delegates execution to a child resolver. + * Checks whether the user is currently authenticated & if so delegates execution to a child + * resolver. */ @Deprecated public final class AuthenticatedResolver implements DataFetcher { - private final DataFetcher _resolver; + private final DataFetcher _resolver; - public AuthenticatedResolver(final DataFetcher resolver) { - _resolver = resolver; - } + public AuthenticatedResolver(final DataFetcher resolver) { + _resolver = resolver; + } - @Override - public final T get(DataFetchingEnvironment environment) throws Exception { - final QueryContext context = environment.getContext(); - if (context.isAuthenticated()) { - return _resolver.get(environment); - } - throw new AuthenticationException("Failed to authenticate the current user."); + @Override + public final T get(DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + if (context.isAuthenticated()) { + return _resolver.get(environment); } + throw new AuthenticationException("Failed to authenticate the current user."); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java index 930c98ee7113ad..5ab07701c15a23 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/BatchLoadUtils.java @@ -2,18 +2,17 @@ import com.google.common.collect.Iterables; 
import com.linkedin.datahub.graphql.generated.Entity; -import org.dataloader.DataLoader; -import org.dataloader.DataLoaderRegistry; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import org.dataloader.DataLoader; +import org.dataloader.DataLoaderRegistry; public class BatchLoadUtils { - private BatchLoadUtils() { } + private BatchLoadUtils() {} public static CompletableFuture> batchLoadEntitiesOfSameType( List entities, @@ -24,9 +23,10 @@ public static CompletableFuture> batchLoadEntitiesOfSameType( } // Assume all entities are of the same type final com.linkedin.datahub.graphql.types.EntityType filteredEntity = - Iterables.getOnlyElement(entityTypes.stream() - .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) - .collect(Collectors.toList())); + Iterables.getOnlyElement( + entityTypes.stream() + .filter(entity -> entities.get(0).getClass().isAssignableFrom(entity.objectClass())) + .collect(Collectors.toList())); final DataLoader loader = dataLoaderRegistry.getDataLoader(filteredEntity.name()); List keyList = new ArrayList(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java index b0f23e63177e60..aba781f9e1dc70 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/EntityTypeMapper.java @@ -7,9 +7,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service Storage Entities + * This class is for mapping between friendly GraphQL EntityType Enum to the Metadata Service + * Storage Entities */ public class 
EntityTypeMapper { @@ -44,10 +44,10 @@ public class EntityTypeMapper { .build(); private static final Map ENTITY_NAME_TO_TYPE = - ENTITY_TYPE_TO_NAME.entrySet().stream().collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); + ENTITY_TYPE_TO_NAME.entrySet().stream() + .collect(Collectors.toMap(e -> e.getValue().toLowerCase(), Map.Entry::getKey)); - private EntityTypeMapper() { - } + private EntityTypeMapper() {} public static EntityType getType(String name) { String lowercaseName = name.toLowerCase(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java index 02921b453e3154..b480e287adb9bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/MeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -23,17 +26,12 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestionAuthUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * GraphQL resolver responsible for resolving information about the currently - * logged in User, including - * - * 1. User profile information - * 2. User privilege information, i.e. which features to display in the UI. + * GraphQL resolver responsible for resolving information about the currently logged in User, + * including * + *

1. User profile information 2. User privilege information, i.e. which features to display in + * the UI. */ public class MeResolver implements DataFetcher> { @@ -48,114 +46,123 @@ public MeResolver(final EntityClient entityClient, final FeatureFlags featureFla @Override public CompletableFuture get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - // 1. Get currently logged in user profile. - final Urn userUrn = Urn.createFromString(context.getActorUrn()); - final EntityResponse gmsUser = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, - Collections.singleton(userUrn), null, context.getAuthentication()).get(userUrn); - final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); - - // 2. Get platform privileges - final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); - platformPrivileges.setViewAnalytics(canViewAnalytics(context)); - platformPrivileges.setManagePolicies(canManagePolicies(context)); - platformPrivileges.setManageIdentities(canManageUsersGroups(context)); - platformPrivileges.setGeneratePersonalAccessTokens(canGeneratePersonalAccessToken(context)); - platformPrivileges.setManageDomains(canManageDomains(context)); - platformPrivileges.setManageIngestion(canManageIngestion(context)); - platformPrivileges.setManageSecrets(canManageSecrets(context)); - platformPrivileges.setManageTokens(canManageTokens(context)); - platformPrivileges.setManageTests(canManageTests(context)); - platformPrivileges.setManageGlossaries(canManageGlossaries(context)); - platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); - platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); - platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); - platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); - 
platformPrivileges.setManageGlobalViews(AuthorizationUtils.canManageGlobalViews(context)); - platformPrivileges.setManageOwnershipTypes(AuthorizationUtils.canManageOwnershipTypes(context)); - platformPrivileges.setManageGlobalAnnouncements(AuthorizationUtils.canManageGlobalAnnouncements(context)); - - // Construct and return authenticated user object. - final AuthenticatedUser authUser = new AuthenticatedUser(); - authUser.setCorpUser(corpUser); - authUser.setPlatformPrivileges(platformPrivileges); - return authUser; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to fetch authenticated user!", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // 1. Get currently logged in user profile. + final Urn userUrn = Urn.createFromString(context.getActorUrn()); + final EntityResponse gmsUser = + _entityClient + .batchGetV2( + CORP_USER_ENTITY_NAME, + Collections.singleton(userUrn), + null, + context.getAuthentication()) + .get(userUrn); + final CorpUser corpUser = CorpUserMapper.map(gmsUser, _featureFlags); + + // 2. 
Get platform privileges + final PlatformPrivileges platformPrivileges = new PlatformPrivileges(); + platformPrivileges.setViewAnalytics(canViewAnalytics(context)); + platformPrivileges.setManagePolicies(canManagePolicies(context)); + platformPrivileges.setManageIdentities(canManageUsersGroups(context)); + platformPrivileges.setGeneratePersonalAccessTokens( + canGeneratePersonalAccessToken(context)); + platformPrivileges.setManageDomains(canManageDomains(context)); + platformPrivileges.setManageIngestion(canManageIngestion(context)); + platformPrivileges.setManageSecrets(canManageSecrets(context)); + platformPrivileges.setManageTokens(canManageTokens(context)); + platformPrivileges.setManageTests(canManageTests(context)); + platformPrivileges.setManageGlossaries(canManageGlossaries(context)); + platformPrivileges.setManageUserCredentials(canManageUserCredentials(context)); + platformPrivileges.setCreateDomains(AuthorizationUtils.canCreateDomains(context)); + platformPrivileges.setCreateTags(AuthorizationUtils.canCreateTags(context)); + platformPrivileges.setManageTags(AuthorizationUtils.canManageTags(context)); + platformPrivileges.setManageGlobalViews( + AuthorizationUtils.canManageGlobalViews(context)); + platformPrivileges.setManageOwnershipTypes( + AuthorizationUtils.canManageOwnershipTypes(context)); + platformPrivileges.setManageGlobalAnnouncements( + AuthorizationUtils.canManageGlobalAnnouncements(context)); + + // Construct and return authenticated user object. + final AuthenticatedUser authUser = new AuthenticatedUser(); + authUser.setCorpUser(corpUser); + authUser.setPlatformPrivileges(platformPrivileges); + return authUser; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to fetch authenticated user!", e); + } + }); } - /** - * Returns true if the authenticated user has privileges to view analytics. - */ + /** Returns true if the authenticated user has privileges to view analytics. 
*/ private boolean canViewAnalytics(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.VIEW_ANALYTICS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage policies analytics. - */ + /** Returns true if the authenticated user has privileges to manage policies analytics. */ private boolean canManagePolicies(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_POLICIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage users & groups. - */ + /** Returns true if the authenticated user has privileges to manage users & groups. */ private boolean canManageUsersGroups(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.MANAGE_USERS_AND_GROUPS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to generate personal access tokens - */ + /** Returns true if the authenticated user has privileges to generate personal access tokens */ private boolean canGeneratePersonalAccessToken(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + PoliciesConfig.GENERATE_PERSONAL_ACCESS_TOKENS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage (add or remove) tests. 
- */ + /** Returns true if the authenticated user has privileges to manage (add or remove) tests. */ private boolean canManageTests(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage domains - */ + /** Returns true if the authenticated user has privileges to manage domains */ private boolean canManageDomains(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_DOMAINS_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage access tokens - */ + /** Returns true if the authenticated user has privileges to manage access tokens */ private boolean canManageTokens(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_ACCESS_TOKENS); } - /** - * Returns true if the authenticated user has privileges to manage glossaries - */ + /** Returns true if the authenticated user has privileges to manage glossaries */ private boolean canManageGlossaries(final QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return isAuthorized( + context.getAuthorizer(), context.getActorUrn(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } - /** - * Returns true if the authenticated user has privileges to manage user credentials - */ + /** Returns true if the authenticated user has privileges to manage user credentials */ private boolean 
canManageUserCredentials(@Nonnull QueryContext context) { - return isAuthorized(context.getAuthorizer(), context.getActorUrn(), + return isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), PoliciesConfig.MANAGE_USER_CREDENTIALS_PRIVILEGE); } /** * Returns true if the provided actor is authorized for a particular privilege, false otherwise. */ - private boolean isAuthorized(final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { - final AuthorizationRequest request = new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); + private boolean isAuthorized( + final Authorizer authorizer, String actor, PoliciesConfig.Privilege privilege) { + final AuthorizationRequest request = + new AuthorizationRequest(actor, privilege.getType(), Optional.empty()); final AuthorizationResult result = authorizer.authorize(request); return AuthorizationResult.Type.ALLOW.equals(result.getType()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java index 2c2e71ee92eaa4..244012d320b43f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.ObjectMapper; @@ -30,184 +32,198 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.metadata.Constants.*; - - public class ResolverUtils { - private static final Set KEYWORD_EXCLUDED_FILTERS = ImmutableSet.of( - "runId", - "_entityType" - ); - private static final ObjectMapper MAPPER = new ObjectMapper(); - static 
{ - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); - MAPPER.getFactory().setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + private static final Set KEYWORD_EXCLUDED_FILTERS = + ImmutableSet.of("runId", "_entityType"); + private static final ObjectMapper MAPPER = new ObjectMapper(); + + static { + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + MAPPER + .getFactory() + .setStreamReadConstraints(StreamReadConstraints.builder().maxStringLength(maxSize).build()); + } + + private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); + + private ResolverUtils() {} + + @Nonnull + public static T bindArgument(Object argument, Class clazz) { + return MAPPER.convertValue(argument, clazz); + } + + /** + * Returns the string with the forward slash escaped More details on reserved characters in + * Elasticsearch can be found at, + * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters + */ + @Nonnull + public static String escapeForwardSlash(@Nonnull String input) { + if (input.contains("/")) { + input = input.replace("/", "\\\\/"); } - - private static final Logger _logger = LoggerFactory.getLogger(ResolverUtils.class.getName()); - - private ResolverUtils() { } - - @Nonnull - public static T bindArgument(Object argument, Class clazz) { - return MAPPER.convertValue(argument, clazz); + return input; + } + + @Nonnull + public static Authentication getAuthentication(DataFetchingEnvironment environment) { + return ((QueryContext) environment.getContext()).getAuthentication(); + } + + /** + * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure + * that it is matched against a keyword filter in ElasticSearch. 
+ * @param facetFilterInputs The list of facet filters inputs + * @param validFacetFields The set of valid fields against which to filter for. + * @return A map of filter definitions to be used in ElasticSearch. + */ + @Nonnull + public static Map buildFacetFilters( + @Nullable List facetFilterInputs, @Nonnull Set validFacetFields) { + if (facetFilterInputs == null) { + return Collections.emptyMap(); } - /** - * Returns the string with the forward slash escaped - * More details on reserved characters in Elasticsearch can be found at, - * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters - */ - @Nonnull - public static String escapeForwardSlash(@Nonnull String input) { - if (input.contains("/")) { - input = input.replace("/", "\\\\/"); - } - return input; - } - - @Nonnull - public static Authentication getAuthentication(DataFetchingEnvironment environment) { - return ((QueryContext) environment.getContext()).getAuthentication(); - } - - /** - * @apiNote DO NOT use this method if the facet filters do not include `.keyword` suffix to ensure - * that it is matched against a keyword filter in ElasticSearch. - * - * @param facetFilterInputs The list of facet filters inputs - * @param validFacetFields The set of valid fields against which to filter for. - * @return A map of filter definitions to be used in ElasticSearch. 
- */ - @Nonnull - public static Map buildFacetFilters(@Nullable List facetFilterInputs, - @Nonnull Set validFacetFields) { - if (facetFilterInputs == null) { - return Collections.emptyMap(); - } - - final Map facetFilters = new HashMap<>(); - - facetFilterInputs.forEach(facetFilterInput -> { - if (!validFacetFields.contains(facetFilterInput.getField())) { - throw new ValidationException(String.format("Unrecognized facet with name %s provided", facetFilterInput.getField())); - } - if (!facetFilterInput.getValues().isEmpty()) { - facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); - } + final Map facetFilters = new HashMap<>(); + + facetFilterInputs.forEach( + facetFilterInput -> { + if (!validFacetFields.contains(facetFilterInput.getField())) { + throw new ValidationException( + String.format( + "Unrecognized facet with name %s provided", facetFilterInput.getField())); + } + if (!facetFilterInput.getValues().isEmpty()) { + facetFilters.put(facetFilterInput.getField(), facetFilterInput.getValues().get(0)); + } }); - return facetFilters; - } + return facetFilters; + } - public static List criterionListFromAndFilter(List andFilters) { - return andFilters != null && !andFilters.isEmpty() - ? andFilters.stream() + public static List criterionListFromAndFilter(List andFilters) { + return andFilters != null && !andFilters.isEmpty() + ? andFilters.stream() .map(filter -> criterionFromFilter(filter)) - .collect(Collectors.toList()) : Collections.emptyList(); + .collect(Collectors.toList()) + : Collections.emptyList(); + } + + // In the case that user sends filters to be or-d together, we need to build a series of + // conjunctive criterion + // arrays, rather than just one for the AND case. 
+ public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( + @Nonnull List orFilters) { + return new ConjunctiveCriterionArray( + orFilters.stream() + .map( + orFilter -> { + CriterionArray andCriterionForOr = + new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); + return new ConjunctiveCriterion().setAnd(andCriterionForOr); + }) + .collect(Collectors.toList())); + } + + @Nullable + public static Filter buildFilter( + @Nullable List andFilters, @Nullable List orFilters) { + if ((andFilters == null || andFilters.isEmpty()) + && (orFilters == null || orFilters.isEmpty())) { + return null; + } + // Or filters are the new default. We will check them first. + // If we have OR filters, we need to build a series of CriterionArrays + if (orFilters != null && !orFilters.isEmpty()) { + return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); } - // In the case that user sends filters to be or-d together, we need to build a series of conjunctive criterion - // arrays, rather than just one for the AND case. 
- public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( - @Nonnull List orFilters - ) { - return new ConjunctiveCriterionArray(orFilters.stream().map(orFilter -> { - CriterionArray andCriterionForOr = new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); - return new ConjunctiveCriterion().setAnd( - andCriterionForOr - ); - } - ).collect(Collectors.toList())); + // If or filters are not set, someone may be using the legacy and filters + final List andCriterions = criterionListFromAndFilter(andFilters); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + } + + public static Criterion criterionFromFilter(final FacetFilterInput filter) { + return criterionFromFilter(filter, false); + } + + // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) + public static Criterion criterionFromFilter( + final FacetFilterInput filter, final Boolean skipKeywordSuffix) { + Criterion result = new Criterion(); + + if (skipKeywordSuffix) { + result.setField(filter.getField()); + } else { + result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); } - @Nullable - public static Filter buildFilter(@Nullable List andFilters, @Nullable List orFilters) { - if ((andFilters == null || andFilters.isEmpty()) && (orFilters == null || orFilters.isEmpty())) { - return null; - } - - // Or filters are the new default. We will check them first. 
- // If we have OR filters, we need to build a series of CriterionArrays - if (orFilters != null && !orFilters.isEmpty()) { - return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); - } - - // If or filters are not set, someone may be using the legacy and filters - final List andCriterions = criterionListFromAndFilter(andFilters); - return new Filter().setOr( - new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + // `value` is deprecated in place of `values`- this is to support old query patterns. If values + // is provided, + // this statement will be skipped + if (filter.getValues() == null && filter.getValue() != null) { + result.setValues(new StringArray(filter.getValue())); + result.setValue(filter.getValue()); + } else if (filter.getValues() != null) { + result.setValues(new StringArray(filter.getValues())); + if (!filter.getValues().isEmpty()) { + result.setValue(filter.getValues().get(0)); + } else { + result.setValue(""); + } + } else { + result.setValues(new StringArray()); + result.setValue(""); } - public static Criterion criterionFromFilter(final FacetFilterInput filter) { - return criterionFromFilter(filter, false); + if (filter.getCondition() != null) { + result.setCondition(Condition.valueOf(filter.getCondition().toString())); + } else { + result.setCondition(Condition.EQUAL); } - // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) - public static Criterion criterionFromFilter(final FacetFilterInput filter, final Boolean skipKeywordSuffix) { - Criterion result = new Criterion(); - - if (skipKeywordSuffix) { - result.setField(filter.getField()); - } else { - result.setField(getFilterField(filter.getField(), skipKeywordSuffix)); - } - - // `value` is deprecated in place of `values`- this is to support old query patterns. 
If values is provided, - // this statement will be skipped - if (filter.getValues() == null && filter.getValue() != null) { - result.setValues(new StringArray(filter.getValue())); - result.setValue(filter.getValue()); - } else if (filter.getValues() != null) { - result.setValues(new StringArray(filter.getValues())); - if (!filter.getValues().isEmpty()) { - result.setValue(filter.getValues().get(0)); - } else { - result.setValue(""); - } - } else { - result.setValues(new StringArray()); - result.setValue(""); - } - - - if (filter.getCondition() != null) { - result.setCondition(Condition.valueOf(filter.getCondition().toString())); - } else { - result.setCondition(Condition.EQUAL); - } - - if (filter.getNegated() != null) { - result.setNegated(filter.getNegated()); - } - - return result; + if (filter.getNegated() != null) { + result.setNegated(filter.getNegated()); } - private static String getFilterField(final String originalField, final boolean skipKeywordSuffix) { - if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { - return originalField; - } - return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + return result; + } + + private static String getFilterField( + final String originalField, final boolean skipKeywordSuffix) { + if (KEYWORD_EXCLUDED_FILTERS.contains(originalField)) { + return originalField; } + return ESUtils.toKeywordField(originalField, skipKeywordSuffix); + } - public static Filter buildFilterWithUrns(@Nonnull Set urns, @Nullable Filter inputFilters) { - Criterion urnMatchCriterion = new Criterion().setField("urn") + public static Filter buildFilterWithUrns(@Nonnull Set urns, @Nullable Filter inputFilters) { + Criterion urnMatchCriterion = + new Criterion() + .setField("urn") .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); - if (inputFilters == null) { - return QueryUtils.newFilter(urnMatchCriterion); - } - - // Add urn match criterion to each or clause - if 
(inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { - for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { - conjunctiveCriterion.getAnd().add(urnMatchCriterion); - } - return inputFilters; - } - return QueryUtils.newFilter(urnMatchCriterion); + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + if (inputFilters == null) { + return QueryUtils.newFilter(urnMatchCriterion); + } + + // Add urn match criterion to each or clause + if (inputFilters.getOr() != null && !inputFilters.getOr().isEmpty()) { + for (ConjunctiveCriterion conjunctiveCriterion : inputFilters.getOr()) { + conjunctiveCriterion.getAnd().add(urnMatchCriterion); + } + return inputFilters; } + return QueryUtils.newFilter(urnMatchCriterion); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index b5b13cc00b40d0..2a074b950d0ff5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Assertion; @@ -26,13 +28,9 @@ import java.util.stream.Collectors; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * GraphQL Resolver used for fetching AssertionRunEvents. - */ -public class AssertionRunEventResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching AssertionRunEvents. 
*/ +public class AssertionRunEventResolver + implements DataFetcher> { private final EntityClient _client; @@ -42,58 +40,72 @@ public AssertionRunEventResolver(final EntityClient client) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String urn = ((Assertion) environment.getSource()).getUrn(); - final String maybeStatus = environment.getArgumentOrDefault("status", null); - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; + final String urn = ((Assertion) environment.getSource()).getUrn(); + final String maybeStatus = environment.getArgumentOrDefault("status", null); + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? 
bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; - try { - // Step 1: Fetch aspects from GMS - List aspects = _client.getTimeseriesAspectValues( - urn, - Constants.ASSERTION_ENTITY_NAME, - Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - buildFilter(maybeFilters, maybeStatus), - context.getAuthentication()); + try { + // Step 1: Fetch aspects from GMS + List aspects = + _client.getTimeseriesAspectValues( + urn, + Constants.ASSERTION_ENTITY_NAME, + Constants.ASSERTION_RUN_EVENT_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilter(maybeFilters, maybeStatus), + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. - List runEvents = aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); + // Step 2: Bind profiles into GraphQL strong types. + List runEvents = + aspects.stream().map(AssertionRunEventMapper::map).collect(Collectors.toList()); - // Step 3: Package and return response. - final AssertionRunEventsResult result = new AssertionRunEventsResult(); - result.setTotal(runEvents.size()); - result.setFailed(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.FAILURE.equals( - runEvent.getResult().getType() - )).count())); - result.setSucceeded(Math.toIntExact(runEvents.stream().filter(runEvent -> - AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) - && runEvent.getResult() != null - && AssertionResultType.SUCCESS.equals(runEvent.getResult().getType() - )).count())); - result.setRunEvents(runEvents); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 3: Package and return response. 
+ final AssertionRunEventsResult result = new AssertionRunEventsResult(); + result.setTotal(runEvents.size()); + result.setFailed( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.FAILURE.equals( + runEvent.getResult().getType())) + .count())); + result.setSucceeded( + Math.toIntExact( + runEvents.stream() + .filter( + runEvent -> + AssertionRunStatus.COMPLETE.equals(runEvent.getStatus()) + && runEvent.getResult() != null + && AssertionResultType.SUCCESS.equals( + runEvent.getResult().getType())) + .count())); + result.setRunEvents(runEvents); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } @Nullable - public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable final String status) { + public static Filter buildFilter( + @Nullable FilterInput filtersInput, @Nullable final String status) { if (filtersInput == null && status == null) { return null; } @@ -107,8 +119,14 @@ public static Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable f if (filtersInput != null) { facetFilters.addAll(filtersInput.getAnd()); } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilters.stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + facetFilters.stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java index 8006ae7d2a464c..89912b2814e400 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolver.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionInfo; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -19,63 +19,76 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver that deletes an Assertion. - */ +/** GraphQL Resolver that deletes an Assertion. 
*/ @Slf4j -public class DeleteAssertionResolver implements DataFetcher> { +public class DeleteAssertionResolver implements DataFetcher> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteAssertionResolver(final EntityClient entityClient, final EntityService entityService) { + public DeleteAssertionResolver( + final EntityClient entityClient, final EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn assertionUrn = Urn.createFromString(environment.getArgument("urn")); - return CompletableFuture.supplyAsync(() -> { - - // 1. check the entity exists. If not, return false. - if (!_entityService.exists(assertionUrn)) { - return true; - } - - if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { - try { - _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(assertionUrn, context.getAuthentication()); - } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for assertion with urn %s", assertionUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // 1. check the entity exists. If not, return false. + if (!_entityService.exists(assertionUrn)) { return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against assertion with urn %s", assertionUrn), e); } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + + if (isAuthorizedToDeleteAssertion(context, assertionUrn)) { + try { + _entityClient.deleteEntity(assertionUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences( + assertionUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for assertion with urn %s", + assertionUrn), + e); + } + }); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against assertion with urn %s", assertionUrn), + e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - /** - * Determine whether the current user is allowed to remove an assertion. - */ - private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final Urn assertionUrn) { + /** Determine whether the current user is allowed to remove an assertion. */ + private boolean isAuthorizedToDeleteAssertion( + final QueryContext context, final Urn assertionUrn) { // 2. fetch the assertion info AssertionInfo info = - (AssertionInfo) EntityUtils.getAspectFromEntity( - assertionUrn.toString(), Constants.ASSERTION_INFO_ASPECT_NAME, _entityService, null); + (AssertionInfo) + EntityUtils.getAspectFromEntity( + assertionUrn.toString(), + Constants.ASSERTION_INFO_ASPECT_NAME, + _entityService, + null); if (info != null) { // 3. 
check whether the actor has permission to edit the assertions on the assertee @@ -86,11 +99,14 @@ private boolean isAuthorizedToDeleteAssertion(final QueryContext context, final return true; } - private boolean isAuthorizedToDeleteAssertionFromAssertee(final QueryContext context, final Urn asserteeUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToDeleteAssertionFromAssertee( + final QueryContext context, final Urn asserteeUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_ASSERTIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), context.getActorUrn(), @@ -104,7 +120,8 @@ private Urn getAsserteeUrnFromInfo(final AssertionInfo info) { case DATASET: return info.getDatasetAssertion().getDataset(); default: - throw new RuntimeException(String.format("Unsupported Assertion Type %s provided", info.getType())); + throw new RuntimeException( + String.format("Unsupported Assertion Type %s provided", info.getType())); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java index ff573bb59fba13..9814589df76514 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolver.java @@ -26,11 +26,9 @@ import 
java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of Assertions associated with an Entity. - */ -public class EntityAssertionsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching the list of Assertions associated with an Entity. */ +public class EntityAssertionsResolver + implements DataFetcher> { private static final String ASSERTS_RELATIONSHIP_NAME = "Asserts"; @@ -44,54 +42,60 @@ public EntityAssertionsResolver(final EntityClient entityClient, final GraphClie @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 200); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 200); - try { - // Step 1: Fetch set of assertions associated with the target entity from the Graph Store - final EntityRelationships relationships = _graphClient.getRelatedEntities( - entityUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - start, - count, - context.getActorUrn() - ); + try { + // Step 1: Fetch set of assertions associated with the target entity from the Graph + // Store + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + entityUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + start, + count, + context.getActorUrn()); - final List assertionUrns = 
relationships.getRelationships().stream().map(EntityRelationship::getEntity).collect(Collectors.toList()); + final List assertionUrns = + relationships.getRelationships().stream() + .map(EntityRelationship::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the assertion entities based on the urns from step 1 - final Map entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the assertion entities based on the urns from step 1 + final Map entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS assertion model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List assertions = gmsResults.stream() - .filter(Objects::nonNull) - .map(AssertionMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS assertion model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List assertions = + gmsResults.stream() + .filter(Objects::nonNull) + .map(AssertionMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final EntityAssertionsResult result = new EntityAssertionsResult(); - result.setCount(relationships.getCount()); - result.setStart(relationships.getStart()); - result.setTotal(relationships.getTotal()); - result.setAssertions(assertions); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); - } - }); + // Step 4: Package and return result + final EntityAssertionsResult result = new EntityAssertionsResult(); + result.setCount(relationships.getCount()); + 
result.setStart(relationships.getStart()); + result.setTotal(relationships.getTotal()); + result.setAssertions(assertions); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve Assertion Run Events from GMS", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java index 8f5be1000bb453..9015ad0ebb2102 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/AccessTokenUtil.java @@ -5,13 +5,9 @@ import java.time.temporal.ChronoUnit; import java.util.Optional; - - public class AccessTokenUtil { - /** - * Convert an {@link AccessTokenDuration} into its milliseconds equivalent. - */ + /** Convert an {@link AccessTokenDuration} into its milliseconds equivalent. 
*/ public static Optional mapDurationToMs(final AccessTokenDuration duration) { switch (duration) { case ONE_HOUR: @@ -29,9 +25,10 @@ public static Optional mapDurationToMs(final AccessTokenDuration duration) case NO_EXPIRY: return Optional.empty(); default: - throw new RuntimeException(String.format("Unrecognized access token duration %s provided", duration)); + throw new RuntimeException( + String.format("Unrecognized access token duration %s provided", duration)); } } - private AccessTokenUtil() { } + private AccessTokenUtil() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java index cd55d81aec6ad1..14a1b9a1f7a017 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/CreateAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatefulTokenService; @@ -10,10 +12,10 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AccessToken; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.AccessTokenType; import com.linkedin.datahub.graphql.generated.CreateAccessTokenInput; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.metadata.Constants; import graphql.schema.DataFetcher; import 
graphql.schema.DataFetchingEnvironment; @@ -22,12 +24,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for creating personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for creating personal & service principal v2-type (stateful) access tokens. */ @Slf4j public class CreateAccessTokenResolver implements DataFetcher> { @@ -38,62 +35,85 @@ public CreateAccessTokenResolver(final StatefulTokenService statefulTokenService } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final CreateAccessTokenInput input = bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); - - log.info("User {} requesting new access token for user {} ", context.getActorUrn(), input.getActorUrn()); - - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. 
- final String actorUrn = input.getActorUrn(); - final Date date = new Date(); - final long createdAtInMs = date.getTime(); - final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - - final String tokenName = input.getName(); - final String tokenDescription = input.getDescription(); - - final String accessToken = - _statefulTokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null), - createdAtInMs, tokenName, tokenDescription, context.getActorUrn()); - log.info("Generated access token for {} of type {} with duration {}", input.getActorUrn(), input.getType(), - input.getDuration()); - try { - final String tokenHash = _statefulTokenService.hash(accessToken); - - final AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - result.setMetadata(metadata); - - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new access token with name %s", input.getName()), - e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final CreateAccessTokenInput input = + bindArgument(environment.getArgument("input"), CreateAccessTokenInput.class); + + log.info( + "User {} requesting new access token for user {} ", + context.getActorUrn(), + input.getActorUrn()); + + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. + final String actorUrn = input.getActorUrn(); + final Date date = new Date(); + final long createdAtInMs = date.getTime(); + final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + + final String tokenName = input.getName(); + final String tokenDescription = input.getDescription(); + + final String accessToken = + _statefulTokenService.generateAccessToken( + type, + createActor(input.getType(), actorUrn), + expiresInMs.orElse(null), + createdAtInMs, + tokenName, + tokenDescription, + context.getActorUrn()); + log.info( + "Generated access token for {} of type {} with duration {}", + input.getActorUrn(), + input.getType(), + input.getDuration()); + try { + final String tokenHash = _statefulTokenService.hash(accessToken); + + final AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn( + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenHash).toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + result.setMetadata(metadata); + + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new access token with name %s", input.getName()), + e); + } + } + throw new 
AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final CreateAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final CreateAccessTokenInput input) { if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final CreateAccessTokenInput input) { + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final CreateAccessTokenInput input) { return AuthorizationUtils.canManageTokens(context) - || input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + || input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -101,6 +121,7 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java index 5ac4ec8ac3a6b5..aed6bd6cb98af0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/GetAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.token.StatelessTokenService; @@ -18,12 +20,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for generating personal & service principal access tokens - */ +/** Resolver for generating personal & service principal access tokens */ @Slf4j public class GetAccessTokenResolver implements DataFetcher> { @@ -34,39 +31,49 @@ public GetAccessTokenResolver(final StatelessTokenService tokenService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final GetAccessTokenInput input = bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + return 
CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final GetAccessTokenInput input = + bindArgument(environment.getArgument("input"), GetAccessTokenInput.class); - if (isAuthorizedToGenerateToken(context, input)) { - final TokenType type = TokenType.valueOf( - input.getType().toString()); // warn: if we are out of sync with AccessTokenType there are problems. - final String actorUrn = input.getActorUrn(); - final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); - final String accessToken = - _tokenService.generateAccessToken(type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); - AccessToken result = new AccessToken(); - result.setAccessToken(accessToken); - return result; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + if (isAuthorizedToGenerateToken(context, input)) { + final TokenType type = + TokenType.valueOf( + input + .getType() + .toString()); // warn: if we are out of sync with AccessTokenType there are + // problems. + final String actorUrn = input.getActorUrn(); + final Optional expiresInMs = AccessTokenUtil.mapDurationToMs(input.getDuration()); + final String accessToken = + _tokenService.generateAccessToken( + type, createActor(input.getType(), actorUrn), expiresInMs.orElse(null)); + AccessToken result = new AccessToken(); + result.setAccessToken(accessToken); + return result; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - private boolean isAuthorizedToGenerateToken(final QueryContext context, final GetAccessTokenInput input) { + private boolean isAuthorizedToGenerateToken( + final QueryContext context, final GetAccessTokenInput input) { // Currently only an actor can generate a personal token for themselves. 
if (AccessTokenType.PERSONAL.equals(input.getType())) { return isAuthorizedToGeneratePersonalAccessToken(context, input); } - throw new UnsupportedOperationException(String.format("Unsupported AccessTokenType %s provided", input.getType())); + throw new UnsupportedOperationException( + String.format("Unsupported AccessTokenType %s provided", input.getType())); } - private boolean isAuthorizedToGeneratePersonalAccessToken(final QueryContext context, - final GetAccessTokenInput input) { - return input.getActorUrn().equals(context.getActorUrn()) && AuthorizationUtils.canGeneratePersonalAccessToken( - context); + private boolean isAuthorizedToGeneratePersonalAccessToken( + final QueryContext context, final GetAccessTokenInput input) { + return input.getActorUrn().equals(context.getActorUrn()) + && AuthorizationUtils.canGeneratePersonalAccessToken(context); } private Actor createActor(AccessTokenType tokenType, String actorUrn) { @@ -74,14 +81,16 @@ private Actor createActor(AccessTokenType tokenType, String actorUrn) { // If we are generating a personal access token, then the actor will be of "USER" type. 
return new Actor(ActorType.USER, createUrn(actorUrn).getId()); } - throw new IllegalArgumentException(String.format("Unsupported token type %s provided", tokenType)); + throw new IllegalArgumentException( + String.format("Unsupported token type %s provided", tokenType)); } private Urn createUrn(final String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new IllegalArgumentException(String.format("Failed to validate provided urn %s", urnStr)); + throw new IllegalArgumentException( + String.format("Failed to validate provided urn %s", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index f9ba552d349e04..5cfa80e394c5ff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,14 +25,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for listing personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for listing personal & service principal v2-type (stateful) access tokens. 
*/ @Slf4j -public class ListAccessTokensResolver implements DataFetcher> { +public class ListAccessTokensResolver + implements DataFetcher> { private static final String EXPIRES_AT_FIELD_NAME = "expiresAt"; @@ -41,60 +39,87 @@ public ListAccessTokensResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final ListAccessTokenInput input = bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final ListAccessTokenInput input = + bindArgument(environment.getArgument("input"), ListAccessTokenInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + final List filters = + input.getFilters() == null ? 
Collections.emptyList() : input.getFilters(); - log.info("User {} listing access tokens with filters {}", context.getActorUrn(), filters.toString()); + log.info( + "User {} listing access tokens with filters {}", + context.getActorUrn(), + filters.toString()); - if (AuthorizationUtils.canManageTokens(context) || isListingSelfTokens(filters, context)) { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(EXPIRES_AT_FIELD_NAME).setOrder(SortOrder.DESCENDING); - final SearchResult searchResult = _entityClient.search(Constants.ACCESS_TOKEN_ENTITY_NAME, "", - buildFilter(filters, Collections.emptyList()), sortCriterion, start, count, - getAuthentication(environment), new SearchFlags().setFulltext(true)); + if (AuthorizationUtils.canManageTokens(context) + || isListingSelfTokens(filters, context)) { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(EXPIRES_AT_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); + final SearchResult searchResult = + _entityClient.search( + Constants.ACCESS_TOKEN_ENTITY_NAME, + "", + buildFilter(filters, Collections.emptyList()), + sortCriterion, + start, + count, + getAuthentication(environment), + new SearchFlags().setFulltext(true)); - final List tokens = searchResult.getEntities().stream().map(entity -> { - final AccessTokenMetadata metadata = new AccessTokenMetadata(); - metadata.setUrn(entity.getEntity().toString()); - metadata.setType(EntityType.ACCESS_TOKEN); - return metadata; - }).collect(Collectors.toList()); + final List tokens = + searchResult.getEntities().stream() + .map( + entity -> { + final AccessTokenMetadata metadata = new AccessTokenMetadata(); + metadata.setUrn(entity.getEntity().toString()); + metadata.setType(EntityType.ACCESS_TOKEN); + return metadata; + }) + .collect(Collectors.toList()); - final ListAccessTokenResult result = new ListAccessTokenResult(); - result.setTokens(tokens); - result.setStart(searchResult.getFrom()); - 
result.setCount(searchResult.getPageSize()); - result.setTotal(searchResult.getNumEntities()); + final ListAccessTokenResult result = new ListAccessTokenResult(); + result.setTokens(tokens); + result.setStart(searchResult.getFrom()); + result.setCount(searchResult.getPageSize()); + result.setTotal(searchResult.getNumEntities()); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list access tokens", e); - } - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list access tokens", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Utility method to answer: Does the existing security context have permissions to generate their personal tokens - * AND is the request coming in requesting those personal tokens? - *

- * Note: We look for the actorUrn field because a token generated by someone else means that the generator actor has - * manage all access token privileges which means that he/she will be bound to just listing their own tokens. + * Utility method to answer: Does the existing security context have permissions to generate their + * personal tokens AND is the request coming in requesting those personal tokens? + * + *

Note: We look for the actorUrn field because a token generated by someone else means that + * the generator actor has manage all access token privileges which means that he/she will be + * bound to just listing their own tokens. * * @param filters The filters being used in the request. * @param context Current security context. * @return A boolean stating if the current user can list its personal tokens. */ - private boolean isListingSelfTokens(final List filters, final QueryContext context) { - return AuthorizationUtils.canGeneratePersonalAccessToken(context) && filters.stream() - .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); + private boolean isListingSelfTokens( + final List filters, final QueryContext context) { + return AuthorizationUtils.canGeneratePersonalAccessToken(context) + && filters.stream() + .anyMatch( + filter -> + filter.getField().equals("ownerUrn") + && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java index 252c0eaba6e854..8d0a23e665b1bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/RevokeAccessTokenResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.token.StatefulTokenService; import com.google.common.collect.ImmutableSet; import com.linkedin.access.token.DataHubAccessTokenInfo; @@ -18,42 +20,39 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver for revoking personal & service principal v2-type (stateful) access tokens. - */ +/** Resolver for revoking personal & service principal v2-type (stateful) access tokens. */ @Slf4j public class RevokeAccessTokenResolver implements DataFetcher> { private final EntityClient _entityClient; private final StatefulTokenService _statefulTokenService; - public RevokeAccessTokenResolver(final EntityClient entityClient, final StatefulTokenService statefulTokenService) { + public RevokeAccessTokenResolver( + final EntityClient entityClient, final StatefulTokenService statefulTokenService) { _entityClient = entityClient; _statefulTokenService = statefulTokenService; } @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - return CompletableFuture.supplyAsync(() -> { - final QueryContext context = environment.getContext(); - final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + final String tokenId = bindArgument(environment.getArgument("tokenId"), String.class); - log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); + log.info("User {} revoking access token {}", context.getActorUrn(), tokenId); - if (isAuthorizedToRevokeToken(context, tokenId)) { - try { - _statefulTokenService.revokeAccessToken(tokenId); - } catch (Exception e) { - throw new RuntimeException("Failed to revoke access token", e); - } - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + if (isAuthorizedToRevokeToken(context, tokenId)) { + try { + _statefulTokenService.revokeAccessToken(tokenId); + } catch (Exception e) { + throw new RuntimeException("Failed to revoke access token", e); + } + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private boolean isAuthorizedToRevokeToken(final QueryContext context, final String tokenId) { @@ -62,12 +61,17 @@ private boolean isAuthorizedToRevokeToken(final QueryContext context, final Stri private boolean isOwnerOfAccessToken(final QueryContext context, final String tokenId) { try { - final EntityResponse entityResponse = _entityClient.getV2(Constants.ACCESS_TOKEN_ENTITY_NAME, - Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), - ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), context.getAuthentication()); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + Urn.createFromTuple(Constants.ACCESS_TOKEN_ENTITY_NAME, tokenId), + ImmutableSet.of(Constants.ACCESS_TOKEN_INFO_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(Constants.ACCESS_TOKEN_INFO_NAME)) { + final DataMap data = + entityResponse.getAspects().get(Constants.ACCESS_TOKEN_INFO_NAME).getValue().data(); final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(data); return tokenInfo.getOwnerUrn().toString().equals(context.getActorUrn()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java index 4a1964b36032ca..40c91b43850f76 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowsePathsResolver.java @@ -1,61 +1,65 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.BrowsePathsInput; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.Collections; -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowsePathsResolver implements DataFetcher>> { - private static final Logger _logger = LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); - - private final Map> _typeToEntity; - - public BrowsePathsResolver(@Nonnull final List> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } - - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final BrowsePathsInput input = bindArgument(environment.getArgument("input"), BrowsePathsInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Fetch browse paths. 
entity type: %s, urn: %s", - input.getType(), - input.getUrn())); - if (_typeToEntity.containsKey(input.getType())) { - return _typeToEntity.get(input.getType()).browsePaths(input.getUrn(), environment.getContext()); - } - // Browse path is impl detail. - return Collections.emptyList(); - } catch (Exception e) { - _logger.error("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()) + " " + e.getMessage()); - throw new RuntimeException("Failed to retrieve browse paths: " - + String.format("entity type %s, urn %s", - input.getType(), - input.getUrn()), e); + private static final Logger _logger = + LoggerFactory.getLogger(BrowsePathsResolver.class.getName()); + + private final Map> _typeToEntity; + + public BrowsePathsResolver(@Nonnull final List> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } + + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final BrowsePathsInput input = + bindArgument(environment.getArgument("input"), BrowsePathsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Fetch browse paths. entity type: %s, urn: %s", + input.getType(), input.getUrn())); + if (_typeToEntity.containsKey(input.getType())) { + return _typeToEntity + .get(input.getType()) + .browsePaths(input.getUrn(), environment.getContext()); } + // Browse path is impl detail. 
+ return Collections.emptyList(); + } catch (Exception e) { + _logger.error( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to retrieve browse paths: " + + String.format("entity type %s, urn %s", input.getType(), input.getUrn()), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java index 9c95eceb1e78fd..287d0eef8aec8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseResolver.java @@ -1,77 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.browse; -import com.linkedin.datahub.graphql.types.BrowsableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.generated.BrowseInput; import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.BrowsableEntityType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class BrowseResolver implements DataFetcher> { - private static final int DEFAULT_START = 0; - private static final int DEFAULT_COUNT = 10; + private static final int DEFAULT_START = 0; + private static final int DEFAULT_COUNT = 10; - private 
static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); + private static final Logger _logger = LoggerFactory.getLogger(BrowseResolver.class.getName()); - private final Map> _typeToEntity; + private final Map> _typeToEntity; - public BrowseResolver(@Nonnull final List> browsableEntities) { - _typeToEntity = browsableEntities.stream().collect(Collectors.toMap( - BrowsableEntityType::type, - entity -> entity - )); - } + public BrowseResolver(@Nonnull final List> browsableEntities) { + _typeToEntity = + browsableEntities.stream() + .collect(Collectors.toMap(BrowsableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final BrowseInput input = bindArgument(environment.getArgument("input"), BrowseInput.class); - final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; - final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; + final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; + final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug( - String.format("Executing browse. 
entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count)); - return _typeToEntity.get(input.getType()).browse( - input.getPath(), - input.getFilters(), - start, - count, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count) + " " + e.getMessage()); - throw new RuntimeException("Failed to execute browse: " - + String.format("entity type: %s, path: %s, filters: %s, start: %s, count: %s", - input.getType(), - input.getPath(), - input.getFilters(), - start, - count), e); - } + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + String.format( + "Executing browse. entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count)); + return _typeToEntity + .get(input.getType()) + .browse( + input.getPath(), input.getFilters(), start, count, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute browse: " + + String.format( + "entity type: %s, path: %s, filters: %s, start: %s, count: %s", + input.getType(), input.getPath(), input.getFilters(), start, count), + e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java index 81f82c93f1fa74..396d91c37d81c6 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolver.java @@ -1,14 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.browse; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.BrowsePath; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.types.BrowsableEntityType; -import com.linkedin.datahub.graphql.generated.BrowsePath; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; +import javax.annotation.Nonnull; public class EntityBrowsePathsResolver implements DataFetcher>> { @@ -24,12 +24,14 @@ public CompletableFuture> get(DataFetchingEnvironment environme final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - return CompletableFuture.supplyAsync(() -> { - try { - return _browsableType.browsePaths(urn, context); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _browsableType.browsePaths(urn, context); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to retrieve browse paths for entity with urn %s", urn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java index 76abddc9a99a96..da4a3a76dd7e0e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java 
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/BrowseV2Resolver.java @@ -1,11 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.chart; +import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.BrowseResultGroupV2; import com.linkedin.datahub.graphql.generated.BrowseResultMetadata; import com.linkedin.datahub.graphql.generated.BrowseResultsV2; import com.linkedin.datahub.graphql.generated.BrowseV2Input; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.resolvers.search.SearchUtils; @@ -17,18 +23,13 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.BROWSE_PATH_V2_DELIMITER; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -44,38 +45,60 @@ public class BrowseV2Resolver implements DataFetcher get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final BrowseV2Input 
input = bindArgument(environment.getArgument("input"), BrowseV2Input.class); - final String entityName = EntityTypeMapper.getName(input.getType()); + final List entityNames = getEntityNames(input); final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; final String query = input.getQuery() != null ? input.getQuery() : "*"; // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(query); - return CompletableFuture.supplyAsync(() -> { - try { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - final String pathStr = input.getPath().size() > 0 ? BROWSE_PATH_V2_DELIMITER + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) : ""; - final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); + return CompletableFuture.supplyAsync( + () -> { + try { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + final String pathStr = + input.getPath().size() > 0 + ? BROWSE_PATH_V2_DELIMITER + + String.join(BROWSE_PATH_V2_DELIMITER, input.getPath()) + : ""; + final Filter filter = ResolverUtils.buildFilter(null, input.getOrFilters()); - BrowseResultV2 browseResults = _entityClient.browseV2( - entityName, - pathStr, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(filter, maybeResolvedView.getDefinition().getFilter()) - : filter, - sanitizedQuery, - start, - count, - context.getAuthentication() - ); - return mapBrowseResults(browseResults); - } catch (Exception e) { - throw new RuntimeException("Failed to execute browse V2", e); - } - }); + BrowseResultV2 browseResults = + _entityClient.browseV2( + entityNames, + pathStr, + maybeResolvedView != null + ? SearchUtils.combineFilters( + filter, maybeResolvedView.getDefinition().getFilter()) + : filter, + sanitizedQuery, + start, + count, + context.getAuthentication()); + return mapBrowseResults(browseResults); + } catch (Exception e) { + throw new RuntimeException("Failed to execute browse V2", e); + } + }); + } + + public static List getEntityNames(BrowseV2Input input) { + List entityTypes; + if (input.getTypes() != null && input.getTypes().size() > 0) { + entityTypes = input.getTypes(); + } else if (input.getType() != null) { + entityTypes = ImmutableList.of(input.getType()); + } else { + entityTypes = BROWSE_ENTITY_TYPES; + } + return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); } private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { @@ -85,28 +108,29 @@ private BrowseResultsV2 mapBrowseResults(BrowseResultV2 browseResults) { results.setCount(browseResults.getPageSize()); List groups = new ArrayList<>(); - browseResults.getGroups().forEach(group -> { - BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); - browseGroup.setName(group.getName()); - browseGroup.setCount(group.getCount()); - browseGroup.setHasSubGroups(group.isHasSubGroups()); - if (group.hasUrn() && group.getUrn() != null) { - browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); - } - groups.add(browseGroup); - }); + browseResults + .getGroups() + .forEach( + group -> { + BrowseResultGroupV2 browseGroup = new BrowseResultGroupV2(); + browseGroup.setName(group.getName()); + browseGroup.setCount(group.getCount()); + 
browseGroup.setHasSubGroups(group.isHasSubGroups()); + if (group.hasUrn() && group.getUrn() != null) { + browseGroup.setEntity(UrnToEntityMapper.map(group.getUrn())); + } + groups.add(browseGroup); + }); results.setGroups(groups); BrowseResultMetadata resultMetadata = new BrowseResultMetadata(); - resultMetadata.setPath(Arrays.stream(browseResults.getMetadata().getPath() - .split(BROWSE_PATH_V2_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()) - ); + resultMetadata.setPath( + Arrays.stream(browseResults.getMetadata().getPath().split(BROWSE_PATH_V2_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList())); resultMetadata.setTotalNumEntities(browseResults.getMetadata().getTotalNumEntities()); results.setMetadata(resultMetadata); return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java index 207da02de6ec2d..a2d04a26bfa97d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/chart/ChartStatsSummaryResolver.java @@ -11,24 +11,23 @@ import java.util.concurrent.TimeUnit; import lombok.extern.slf4j.Slf4j; - @Slf4j -public class ChartStatsSummaryResolver implements DataFetcher> { +public class ChartStatsSummaryResolver + implements DataFetcher> { private final TimeseriesAspectService timeseriesAspectService; private final Cache summaryCache; public ChartStatsSummaryResolver(final TimeseriesAspectService timeseriesAspectService) { this.timeseriesAspectService = timeseriesAspectService; - this.summaryCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(6, TimeUnit.HOURS) - .build(); + this.summaryCache = + 
CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(6, TimeUnit.HOURS).build(); } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { // Not yet implemented return CompletableFuture.completedFuture(null); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java index f6bc68caa0821c..81b52991cde90c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/config/AppConfigResolver.java @@ -35,10 +35,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * Resolver responsible for serving app configurations to the React UI. - */ +/** Resolver responsible for serving app configurations to the React UI. 
*/ public class AppConfigResolver implements DataFetcher> { private final GitVersion _gitVersion; @@ -82,7 +79,8 @@ public AppConfigResolver( } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); @@ -103,19 +101,20 @@ public CompletableFuture get(final DataFetchingEnvironment environmen final PoliciesConfig policiesConfig = new PoliciesConfig(); policiesConfig.setEnabled(_authorizationConfiguration.getDefaultAuthorizer().isEnabled()); - policiesConfig.setPlatformPrivileges(com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES - .stream() - .map(this::mapPrivilege) - .collect(Collectors.toList())); + policiesConfig.setPlatformPrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.PLATFORM_PRIVILEGES.stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); - policiesConfig.setResourcePrivileges(com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES - .stream() - .map(this::mapResourcePrivileges) - .collect(Collectors.toList()) - ); + policiesConfig.setResourcePrivileges( + com.linkedin.metadata.authorization.PoliciesConfig.RESOURCE_PRIVILEGES.stream() + .map(this::mapResourcePrivileges) + .collect(Collectors.toList())); final IdentityManagementConfig identityManagementConfig = new IdentityManagementConfig(); - identityManagementConfig.setEnabled(true); // Identity Management always enabled. TODO: Understand if there's a case where this should change. + identityManagementConfig.setEnabled( + true); // Identity Management always enabled. TODO: Understand if there's a case where this + // should change. 
final ManagedIngestionConfig ingestionConfig = new ManagedIngestionConfig(); ingestionConfig.setEnabled(_ingestionConfiguration.isEnabled()); @@ -133,7 +132,8 @@ public CompletableFuture get(final DataFetchingEnvironment environmen } if (_visualConfiguration != null && _visualConfiguration.getQueriesTab() != null) { QueriesTabConfig queriesTabConfig = new QueriesTabConfig(); - queriesTabConfig.setQueriesTabResultSize(_visualConfiguration.getQueriesTab().getQueriesTabResultSize()); + queriesTabConfig.setQueriesTabResultSize( + _visualConfiguration.getQueriesTab().getQueriesTabResultSize()); visualConfig.setQueriesTab(queriesTabConfig); } if (_visualConfiguration != null && _visualConfiguration.getEntityProfile() != null) { @@ -148,7 +148,8 @@ public CompletableFuture get(final DataFetchingEnvironment environmen if (_visualConfiguration != null && _visualConfiguration.getSearchResult() != null) { SearchResultsVisualConfig searchResultsVisualConfig = new SearchResultsVisualConfig(); if (_visualConfiguration.getSearchResult().getEnableNameHighlight() != null) { - searchResultsVisualConfig.setEnableNameHighlight(_visualConfiguration.getSearchResult().getEnableNameHighlight()); + searchResultsVisualConfig.setEnableNameHighlight( + _visualConfiguration.getSearchResult().getEnableNameHighlight()); } visualConfig.setSearchResult(searchResultsVisualConfig); } @@ -166,14 +167,16 @@ public CompletableFuture get(final DataFetchingEnvironment environmen viewsConfig.setEnabled(_viewsConfiguration.isEnabled()); appConfig.setViewsConfig(viewsConfig); - final FeatureFlagsConfig featureFlagsConfig = FeatureFlagsConfig.builder() - .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) - .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) - .setShowBrowseV2(_featureFlags.isShowBrowseV2()) - .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) - .setShowAccessManagement(_featureFlags.isShowAccessManagement()) - 
.setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) - .build(); + final FeatureFlagsConfig featureFlagsConfig = + FeatureFlagsConfig.builder() + .setShowSearchFiltersV2(_featureFlags.isShowSearchFiltersV2()) + .setReadOnlyModeEnabled(_featureFlags.isReadOnlyModeEnabled()) + .setShowBrowseV2(_featureFlags.isShowBrowseV2()) + .setShowAcrylInfo(_featureFlags.isShowAcrylInfo()) + .setShowAccessManagement(_featureFlags.isShowAccessManagement()) + .setNestedDomainsEnabled(_featureFlags.isNestedDomainsEnabled()) + .setPlatformBrowseV2(_featureFlags.isPlatformBrowseV2()) + .build(); appConfig.setFeatureFlags(featureFlagsConfig); @@ -185,14 +188,17 @@ private ResourcePrivileges mapResourcePrivileges( final ResourcePrivileges graphQLPrivileges = new ResourcePrivileges(); graphQLPrivileges.setResourceType(resourcePrivileges.getResourceType()); graphQLPrivileges.setResourceTypeDisplayName(resourcePrivileges.getResourceTypeDisplayName()); - graphQLPrivileges.setEntityType(mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); + graphQLPrivileges.setEntityType( + mapResourceTypeToEntityType(resourcePrivileges.getResourceType())); graphQLPrivileges.setPrivileges( - resourcePrivileges.getPrivileges().stream().map(this::mapPrivilege).collect(Collectors.toList()) - ); + resourcePrivileges.getPrivileges().stream() + .map(this::mapPrivilege) + .collect(Collectors.toList())); return graphQLPrivileges; } - private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { + private Privilege mapPrivilege( + com.linkedin.metadata.authorization.PoliciesConfig.Privilege privilege) { final Privilege graphQLPrivilege = new Privilege(); graphQLPrivilege.setType(privilege.getType()); graphQLPrivilege.setDisplayName(privilege.getDisplayName()); @@ -202,29 +208,53 @@ private Privilege mapPrivilege(com.linkedin.metadata.authorization.PoliciesConfi private EntityType mapResourceTypeToEntityType(final String resourceType) { // TODO: Is 
there a better way to instruct the UI to present a searchable resource? - if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES.getResourceType().equals(resourceType)) { + if (com.linkedin.metadata.authorization.PoliciesConfig.DATASET_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATASET; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DASHBOARD_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DASHBOARD; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CHART_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CHART; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_FLOW_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_FLOW; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DATA_JOB_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DATA_JOB; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.TAG_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.TAG; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_TERM_PRIVILEGES + .getResourceType() + 
.equals(resourceType)) { return EntityType.GLOSSARY_TERM; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.GLOSSARY_NODE_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.GLOSSARY_NODE; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.DOMAIN_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.DOMAIN; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CONTAINER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CONTAINER; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_GROUP_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_GROUP; - } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES.getResourceType().equals(resourceType)) { + } else if (com.linkedin.metadata.authorization.PoliciesConfig.CORP_USER_PRIVILEGES + .getResourceType() + .equals(resourceType)) { return EntityType.CORP_USER; } else { return null; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java index 4b8bd37a4fabef..58f7715c3e627e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; @@ -20,21 +22,16 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. */ @Slf4j public class ContainerEntitiesResolver implements DataFetcher> { - static final List CONTAINABLE_ENTITY_NAMES = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CONTAINER_ENTITY_NAME - ); + static final List CONTAINABLE_ENTITY_NAMES = + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CONTAINER_ENTITY_NAME); private static final String CONTAINER_FIELD_NAME = "container"; private static final String INPUT_ARG_NAME = "input"; private static final String DEFAULT_QUERY = "*"; @@ -55,45 +52,53 @@ public ContainerEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Container) environment.getSource()).getUrn(); - final ContainerEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? 
bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final ContainerEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), ContainerEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : "*"; final int start = input.getStart() != null ? input.getStart() : 0; final int count = input.getCount() != null ? input.getCount() : 20; - return CompletableFuture.supplyAsync(() -> { - - try { - - final Criterion filterCriterion = new Criterion() - .setField(CONTAINER_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - CONTAINABLE_ENTITY_NAMES, - query, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with container with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final Criterion filterCriterion = + new Criterion() + .setField(CONTAINER_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + CONTAINABLE_ENTITY_NAMES, + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve entities associated with container with urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java index 90fad4ca4578a1..9502fb8e5cb931 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -12,15 +14,13 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; - -public class ParentContainersResolver implements DataFetcher> { +public class ParentContainersResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -28,21 +28,25 @@ public ParentContainersResolver(final EntityClient entityClient) { _entityClient = entityClient; } - private void aggregateParentContainers(List containers, String urn, QueryContext context) { + private void aggregateParentContainers( + List containers, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(CONTAINER_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(CONTAINER_ASPECT_NAME), + 
context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { + if (entityResponse != null + && entityResponse.getAspects().containsKey(CONTAINER_ASPECT_NAME)) { DataMap dataMap = entityResponse.getAspects().get(CONTAINER_ASPECT_NAME).getValue().data(); com.linkedin.container.Container container = new com.linkedin.container.Container(dataMap); Urn containerUrn = container.getContainer(); - EntityResponse response = _entityClient.getV2(containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + containerUrn.getEntityType(), containerUrn, null, context.getAuthentication()); if (response != null) { Container mappedContainer = ContainerMapper.map(response); containers.add(mappedContainer); @@ -61,16 +65,17 @@ public CompletableFuture get(DataFetchingEnvironment env final String urn = ((Entity) environment.getSource()).getUrn(); final List containers = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - aggregateParentContainers(containers, urn, context); - final ParentContainersResult result = new ParentContainersResult(); - result.setCount(containers.size()); - result.setContainers(containers); - return result; - } catch (DataHubGraphQLException e) { - throw new RuntimeException("Failed to load all containers", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + aggregateParentContainers(containers, urn, context); + final ParentContainersResult result = new ParentContainersResult(); + result.setCount(containers.size()); + result.setContainers(containers); + return result; + } catch (DataHubGraphQLException e) { + throw new RuntimeException("Failed to load all containers", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java index db125384745a10..b5480359bde6a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryResolver.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpUser; -import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardStatsSummary; +import com.linkedin.datahub.graphql.generated.DashboardUsageMetrics; import com.linkedin.datahub.graphql.generated.DashboardUserUsageCounts; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.metadata.query.filter.Filter; @@ -19,10 +21,9 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - @Slf4j -public class DashboardStatsSummaryResolver implements DataFetcher> { +public class DashboardStatsSummaryResolver + implements DataFetcher> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -32,63 +33,72 @@ public class DashboardStatsSummaryResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - 
- if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } - - try { - - final DashboardStatsSummary result = new DashboardStatsSummary(); - - // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. - List dashboardUsageMetrics = - getDashboardUsageMetrics(resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); - if (dashboardUsageMetrics.size() > 0) { - result.setViewCount(getDashboardViewCount(resourceUrn)); - } - - // Obtain unique user statistics, by rolling up unique users over the past month. - List userUsageCounts = getDashboardUsagePerUser(resourceUrn); - result.setUniqueUserCountLast30Days(userUsageCounts.size()); - result.setTopUsersLast30Days( - trimUsers(userUsageCounts.stream().map(DashboardUserUsageCounts::getUser).collect(Collectors.toList()))); - - this.summaryCache.put(resourceUrn, result); - return result; - - } catch (Exception e) { - log.error(String.format("Failed to load dashboard usage summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } + + try { + + final DashboardStatsSummary result = new DashboardStatsSummary(); + + // Obtain total dashboard view count, by viewing the latest reported dashboard metrics. + List dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); + if (dashboardUsageMetrics.size() > 0) { + result.setViewCount(getDashboardViewCount(resourceUrn)); + } + + // Obtain unique user statistics, by rolling up unique users over the past month. 
+ List userUsageCounts = getDashboardUsagePerUser(resourceUrn); + result.setUniqueUserCountLast30Days(userUsageCounts.size()); + result.setTopUsersLast30Days( + trimUsers( + userUsageCounts.stream() + .map(DashboardUserUsageCounts::getUser) + .collect(Collectors.toList()))); + + this.summaryCache.put(resourceUrn, result); + return result; + + } catch (Exception e) { + log.error( + String.format( + "Failed to load dashboard usage summary for resource %s", + resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. + } + }); } private int getDashboardViewCount(final Urn resourceUrn) { - List dashboardUsageMetrics = getDashboardUsageMetrics( - resourceUrn.toString(), - null, - null, - 1, - this.timeseriesAspectService); + List dashboardUsageMetrics = + getDashboardUsageMetrics( + resourceUrn.toString(), null, null, 1, this.timeseriesAspectService); return dashboardUsageMetrics.get(0).getViewsCount(); } private List getDashboardUsagePerUser(final Urn resourceUrn) { long now = System.currentTimeMillis(); long nowMinusOneMonth = timeMinusOneMonth(now); - Filter bucketStatsFilter = createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); + Filter bucketStatsFilter = + createUsageFilter(resourceUrn.toString(), nowMinusOneMonth, now, true); return getUserUsageCounts(bucketStatsFilter, this.timeseriesAspectService); } @@ -98,4 +108,4 @@ private List trimUsers(final List originalUsers) { } return originalUsers; } - } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java index 24e1db33e9d404..07d028b07b01d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DashboardUsageAggregation; @@ -26,16 +28,14 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - /** * Resolver used for resolving the usage statistics of a Dashboard. - *

- * Returns daily as well as absolute usage metrics of Dashboard + * + *

Returns daily as well as absolute usage metrics of Dashboard */ @Slf4j -public class DashboardUsageStatsResolver implements DataFetcher> { +public class DashboardUsageStatsResolver + implements DataFetcher> { private static final String ES_FIELD_EVENT_GRANULARITY = "eventGranularity"; private final TimeseriesAspectService timeseriesAspectService; @@ -44,34 +44,40 @@ public DashboardUsageStatsResolver(TimeseriesAspectService timeseriesAspectServi } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final String dashboardUrn = ((Entity) environment.getSource()).getUrn(); final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); // Max number of aspects to return for absolute dashboard usage. final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - return CompletableFuture.supplyAsync(() -> { - DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); + return CompletableFuture.supplyAsync( + () -> { + DashboardUsageQueryResult usageQueryResult = new DashboardUsageQueryResult(); - // Time Bucket Stats - Filter bucketStatsFilter = createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); - List dailyUsageBuckets = getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = getAggregations(bucketStatsFilter, dailyUsageBuckets, timeseriesAspectService); + // Time Bucket Stats + Filter bucketStatsFilter = + createUsageFilter(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, true); + List dailyUsageBuckets = + getBuckets(bucketStatsFilter, dashboardUrn, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + getAggregations(bucketStatsFilter, 
dailyUsageBuckets, timeseriesAspectService); - usageQueryResult.setBuckets(dailyUsageBuckets); - usageQueryResult.setAggregations(aggregations); + usageQueryResult.setBuckets(dailyUsageBuckets); + usageQueryResult.setAggregations(aggregations); - // Absolute usage metrics - List dashboardUsageMetrics = - getDashboardUsageMetrics(dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); - usageQueryResult.setMetrics(dashboardUsageMetrics); - return usageQueryResult; - }); + // Absolute usage metrics + List dashboardUsageMetrics = + getDashboardUsageMetrics( + dashboardUrn, maybeStartTimeMillis, maybeEndTimeMillis, maybeLimit); + usageQueryResult.setMetrics(dashboardUsageMetrics); + return usageQueryResult; + }); } - private List getDashboardUsageMetrics(String dashboardUrn, Long maybeStartTimeMillis, - Long maybeEndTimeMillis, Integer maybeLimit) { + private List getDashboardUsageMetrics( + String dashboardUrn, Long maybeStartTimeMillis, Long maybeEndTimeMillis, Integer maybeLimit) { List dashboardUsageMetrics; try { Filter filter = new Filter(); @@ -79,16 +85,26 @@ private List getDashboardUsageMetrics(String dashboardUrn // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); - filter.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); List aspects = - timeseriesAspectService.getAspectValues(Urn.createFromString(dashboardUrn), Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, maybeStartTimeMillis, 
maybeEndTimeMillis, maybeLimit, + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java index 462c18ea33dd44..4f170a296c47e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardUsageStatsUtils.java @@ -32,7 +32,6 @@ import java.util.List; import java.util.stream.Collectors; - public class DashboardUsageStatsUtils { public static final String ES_FIELD_URN = "urn"; @@ -49,15 +48,17 @@ public static List getDashboardUsageMetrics( List dashboardUsageMetrics; try { Filter filter = createUsageFilter(dashboardUrn, null, null, false); - List aspects = timeseriesAspectService.getAspectValues( - Urn.createFromString(dashboardUrn), - Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, - maybeStartTimeMillis, - maybeEndTimeMillis, - maybeLimit, - filter); - dashboardUsageMetrics = aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); + List aspects = + timeseriesAspectService.getAspectValues( + Urn.createFromString(dashboardUrn), + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + 
maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + filter); + dashboardUsageMetrics = + aspects.stream().map(DashboardUsageMetricMapper::map).collect(Collectors.toList()); } catch (URISyntaxException e) { throw new IllegalArgumentException("Invalid resource", e); } @@ -69,8 +70,10 @@ public static DashboardUsageQueryResultAggregations getAggregations( List dailyUsageBuckets, TimeseriesAspectService timeseriesAspectService) { - List userUsageCounts = getUserUsageCounts(filter, timeseriesAspectService); - DashboardUsageQueryResultAggregations aggregations = new DashboardUsageQueryResultAggregations(); + List userUsageCounts = + getUserUsageCounts(filter, timeseriesAspectService); + DashboardUsageQueryResultAggregations aggregations = + new DashboardUsageQueryResultAggregations(); aggregations.setUsers(userUsageCounts); aggregations.setUniqueUserCount(userUsageCounts.size()); @@ -99,29 +102,47 @@ public static DashboardUsageQueryResultAggregations getAggregations( } public static List getBuckets( - Filter filter, - String dashboardUrn, - TimeseriesAspectService timeseriesAspectService) { + Filter filter, String dashboardUrn, TimeseriesAspectService timeseriesAspectService) { AggregationSpec usersCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("uniqueUserCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountAggregation = new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("viewsCount"); AggregationSpec executionsCountAggregation = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("executionsCount"); AggregationSpec usersCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("uniqueUserCount"); + new AggregationSpec() + 
.setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("uniqueUserCount"); AggregationSpec viewsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("viewsCount"); AggregationSpec executionsCountCardinalityAggregation = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{usersCountAggregation, viewsCountAggregation, executionsCountAggregation, - usersCountCardinalityAggregation, viewsCountCardinalityAggregation, executionsCountCardinalityAggregation}; - GenericTable dailyStats = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, - createUsageGroupingBuckets(CalendarInterval.DAY)); + new AggregationSpec[] { + usersCountAggregation, + viewsCountAggregation, + executionsCountAggregation, + usersCountCardinalityAggregation, + viewsCountCardinalityAggregation, + executionsCountCardinalityAggregation + }; + GenericTable dailyStats = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + createUsageGroupingBuckets(CalendarInterval.DAY)); List buckets = new ArrayList<>(); for (StringArray row : dailyStats.getRows()) { @@ -130,7 +151,8 @@ public static List getBuckets( usageAggregation.setDuration(WindowDuration.DAY); usageAggregation.setResource(dashboardUrn); - DashboardUsageAggregationMetrics usageAggregationMetrics = new DashboardUsageAggregationMetrics(); + DashboardUsageAggregationMetrics usageAggregationMetrics = + new DashboardUsageAggregationMetrics(); if 
(!row.get(1).equals(ES_NULL_VALUE) && !row.get(4).equals(ES_NULL_VALUE)) { try { @@ -156,7 +178,8 @@ public static List getBuckets( usageAggregationMetrics.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert executionsCount from ES to object", e); + throw new IllegalArgumentException( + "Failed to convert executionsCount from ES to object", e); } } usageAggregation.setMetrics(usageAggregationMetrics); @@ -165,34 +188,59 @@ public static List getBuckets( return buckets; } - public static List getUserUsageCounts(Filter filter, TimeseriesAspectService timeseriesAspectService) { + public static List getUserUsageCounts( + Filter filter, TimeseriesAspectService timeseriesAspectService) { // Sum aggregation on userCounts.count AggregationSpec sumUsageCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.usageCount"); AggregationSpec sumViewCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.viewsCount"); AggregationSpec sumExecutionCountsCountAggSpec = - new AggregationSpec().setAggregationType(AggregationType.SUM).setFieldPath("userCounts.executionsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.SUM) + .setFieldPath("userCounts.executionsCount"); AggregationSpec usageCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.usageCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.usageCount"); AggregationSpec viewCountsCardinalityAggSpec = - new 
AggregationSpec().setAggregationType(AggregationType.CARDINALITY).setFieldPath("userCounts.viewsCount"); + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) + .setFieldPath("userCounts.viewsCount"); AggregationSpec executionCountsCardinalityAggSpec = - new AggregationSpec().setAggregationType(AggregationType.CARDINALITY) + new AggregationSpec() + .setAggregationType(AggregationType.CARDINALITY) .setFieldPath("userCounts.executionsCount"); AggregationSpec[] aggregationSpecs = - new AggregationSpec[]{sumUsageCountsCountAggSpec, sumViewCountsCountAggSpec, sumExecutionCountsCountAggSpec, - usageCountsCardinalityAggSpec, viewCountsCardinalityAggSpec, executionCountsCardinalityAggSpec}; + new AggregationSpec[] { + sumUsageCountsCountAggSpec, + sumViewCountsCountAggSpec, + sumExecutionCountsCountAggSpec, + usageCountsCardinalityAggSpec, + viewCountsCardinalityAggSpec, + executionCountsCardinalityAggSpec + }; // String grouping bucket on userCounts.user GroupingBucket userGroupingBucket = - new GroupingBucket().setKey("userCounts.user").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - GroupingBucket[] groupingBuckets = new GroupingBucket[]{userGroupingBucket}; + new GroupingBucket() + .setKey("userCounts.user") + .setType(GroupingBucketType.STRING_GROUPING_BUCKET); + GroupingBucket[] groupingBuckets = new GroupingBucket[] {userGroupingBucket}; // Query backend - GenericTable result = timeseriesAspectService.getAggregatedStats(Constants.DASHBOARD_ENTITY_NAME, - Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, aggregationSpecs, filter, groupingBuckets); + GenericTable result = + timeseriesAspectService.getAggregatedStats( + Constants.DASHBOARD_ENTITY_NAME, + Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME, + aggregationSpecs, + filter, + groupingBuckets); // Process response List userUsageCounts = new ArrayList<>(); for (StringArray row : result.getRows()) { @@ -208,7 +256,8 @@ public static List getUserUsageCounts(Filter filter, T 
userUsageCount.setUsageCount(Integer.valueOf(row.get(1))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user usage count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user usage count from ES to int", e); } } if (!row.get(2).equals(ES_NULL_VALUE) && row.get(5).equals(ES_NULL_VALUE)) { @@ -217,7 +266,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setViewsCount(Integer.valueOf(row.get(2))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user views count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user views count from ES to int", e); } } if (!row.get(3).equals(ES_NULL_VALUE) && !row.get(6).equals(ES_NULL_VALUE)) { @@ -226,7 +276,8 @@ public static List getUserUsageCounts(Filter filter, T userUsageCount.setExecutionsCount(Integer.valueOf(row.get(3))); } } catch (NumberFormatException e) { - throw new IllegalArgumentException("Failed to convert user executions count from ES to int", e); + throw new IllegalArgumentException( + "Failed to convert user executions count from ES to int", e); } } userUsageCounts.add(userUsageCount); @@ -239,17 +290,15 @@ public static List getUserUsageCounts(Filter filter, T private static GroupingBucket[] createUsageGroupingBuckets(CalendarInterval calenderInterval) { GroupingBucket timestampBucket = new GroupingBucket(); - timestampBucket.setKey(ES_FIELD_TIMESTAMP) + timestampBucket + .setKey(ES_FIELD_TIMESTAMP) .setType(GroupingBucketType.DATE_GROUPING_BUCKET) .setTimeWindowSize(new TimeWindowSize().setMultiple(1).setUnit(calenderInterval)); - return new GroupingBucket[]{timestampBucket}; + return new GroupingBucket[] {timestampBucket}; } public static Filter createUsageFilter( - String dashboardUrn, - Long startTime, - Long endTime, - boolean byBucket) { + String dashboardUrn, Long startTime, Long endTime, boolean byBucket) { Filter filter = new 
Filter(); final ArrayList criteria = new ArrayList<>(); @@ -260,44 +309,55 @@ public static Filter createUsageFilter( if (startTime != null) { // Add filter for start time - Criterion startTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) - .setValue(Long.toString(startTime)); + Criterion startTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.GREATER_THAN_OR_EQUAL_TO) + .setValue(Long.toString(startTime)); criteria.add(startTimeCriterion); } if (endTime != null) { // Add filter for end time - Criterion endTimeCriterion = new Criterion().setField(ES_FIELD_TIMESTAMP) - .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) - .setValue(Long.toString(endTime)); + Criterion endTimeCriterion = + new Criterion() + .setField(ES_FIELD_TIMESTAMP) + .setCondition(Condition.LESS_THAN_OR_EQUAL_TO) + .setValue(Long.toString(endTime)); criteria.add(endTimeCriterion); } if (byBucket) { - // Add filter for presence of eventGranularity - only consider bucket stats and not absolute stats + // Add filter for presence of eventGranularity - only consider bucket stats and not absolute + // stats // since unit is mandatory, we assume if eventGranularity contains unit, then it is not null Criterion onlyTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.CONTAIN).setValue("unit"); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.CONTAIN) + .setValue("unit"); criteria.add(onlyTimeBucketsCriterion); } else { // Add filter for absence of eventGranularity - only consider absolute stats Criterion excludeTimeBucketsCriterion = - new Criterion().setField(ES_FIELD_EVENT_GRANULARITY).setCondition(Condition.IS_NULL).setValue(""); + new Criterion() + .setField(ES_FIELD_EVENT_GRANULARITY) + .setCondition(Condition.IS_NULL) + .setValue(""); criteria.add(excludeTimeBucketsCriterion); } - filter.setOr(new 
ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } - public static Long timeMinusOneMonth(long time) { final long oneHourMillis = 60 * 60 * 1000; final long oneDayMillis = 24 * oneHourMillis; return time - (31 * oneDayMillis + 1); } - private DashboardUsageStatsUtils() { } + private DashboardUsageStatsUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java index 9c32fa1c080762..f5d4f949e5710d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/BatchSetDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -8,15 +10,12 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -27,54 +26,80 @@ public class 
BatchSetDataProductResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchSetDataProductInput input = bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); + final BatchSetDataProductInput input = + bindArgument(environment.getArgument("input"), BatchSetDataProductInput.class); final String maybeDataProductUrn = input.getDataProductUrn(); final List resources = input.getResourceUrns(); - return CompletableFuture.supplyAsync(() -> { - - verifyResources(resources, context); - verifyDataProduct(maybeDataProductUrn, context); + return CompletableFuture.supplyAsync( + () -> { + verifyResources(resources, context); + verifyDataProduct(maybeDataProductUrn, context); - try { - List resourceUrns = resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (maybeDataProductUrn != null) { - batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); - } else { - batchUnsetDataProduct(resourceUrns, context); - } - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + List resourceUrns = + resources.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + if (maybeDataProductUrn != null) { + batchSetDataProduct(maybeDataProductUrn, resourceUrns, context); + } else { + batchUnsetDataProduct(resourceUrns, context); + } + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void verifyResources(List resources, QueryContext context) { for (String resource : resources) { - if 
(!_dataProductService.verifyEntityExists(UrnUtils.getUrn(resource), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, %s in resources does not exist", resource)); + if (!_dataProductService.verifyEntityExists( + UrnUtils.getUrn(resource), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, %s in resources does not exist", resource)); } Urn resourceUrn = UrnUtils.getUrn(resource); - if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!DataProductAuthorizationUtils.isAuthorizedToUpdateDataProductsForEntity( + context, resourceUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } } private void verifyDataProduct(String maybeDataProductUrn, QueryContext context) { - if (maybeDataProductUrn != null && !_dataProductService.verifyEntityExists(UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { - throw new RuntimeException(String.format("Failed to batch set Data Product, Data Product urn %s does not exist", maybeDataProductUrn)); + if (maybeDataProductUrn != null + && !_dataProductService.verifyEntityExists( + UrnUtils.getUrn(maybeDataProductUrn), context.getAuthentication())) { + throw new RuntimeException( + String.format( + "Failed to batch set Data Product, Data Product urn %s does not exist", + maybeDataProductUrn)); } } - private void batchSetDataProduct(@Nonnull String dataProductUrn, List resources, QueryContext context) { - log.debug("Batch setting Data Product. 
dataProduct urn: {}, resources: {}", dataProductUrn, resources); + private void batchSetDataProduct( + @Nonnull String dataProductUrn, List resources, QueryContext context) { + log.debug( + "Batch setting Data Product. dataProduct urn: {}, resources: {}", + dataProductUrn, + resources); try { - _dataProductService.batchSetDataProduct(UrnUtils.getUrn(dataProductUrn), resources, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.batchSetDataProduct( + UrnUtils.getUrn(dataProductUrn), + resources, + context.getAuthentication(), + UrnUtils.getUrn(context.getActorUrn())); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Data Product %s to resources with urns %s!", dataProductUrn, resources), e); + throw new RuntimeException( + String.format( + "Failed to batch set Data Product %s to resources with urns %s!", + dataProductUrn, resources), + e); } } @@ -82,10 +107,14 @@ private void batchUnsetDataProduct(List resources, QueryContext context) { log.debug("Batch unsetting Data Product. 
resources: {}", resources); try { for (Urn resource : resources) { - _dataProductService.unsetDataProduct(resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); + _dataProductService.unsetDataProduct( + resource, context.getAuthentication(), UrnUtils.getUrn(context.getActorUrn())); } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch unset data product for resources with urns %s!", resources), e); + throw new RuntimeException( + String.format( + "Failed to batch unset data product for resources with urns %s!", resources), + e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java index f644ff31a571b4..8ac7b2c3ce3754 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/CreateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -12,13 +14,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class CreateDataProductResolver implements DataFetcher> { @@ -26,37 +25,46 @@ public class CreateDataProductResolver 
implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateDataProductInput input = bindArgument(environment.getArgument("input"), CreateDataProductInput.class); + final CreateDataProductInput input = + bindArgument(environment.getArgument("input"), CreateDataProductInput.class); final Authentication authentication = context.getAuthentication(); final Urn domainUrn = UrnUtils.getUrn(input.getDomainUrn()); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Domain provided dos not exist"); - } - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - final Urn dataProductUrn = _dataProductService.createDataProduct( - input.getProperties().getName(), - input.getProperties().getDescription(), - authentication); - _dataProductService.setDomain(dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new DataProduct from input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists(domainUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Domain provided dos not exist"); + } + if 
(!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + final Urn dataProductUrn = + _dataProductService.createDataProduct( + input.getId(), + input.getProperties().getName(), + input.getProperties().getDescription(), + authentication); + _dataProductService.setDomain( + dataProductUrn, UrnUtils.getUrn(input.getDomainUrn()), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(dataProductUrn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new DataProduct from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java index 596e292e7fe337..f6fe11a587a39b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DataProductAuthorizationUtils.java @@ -7,25 +7,27 @@ import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.metadata.authorization.PoliciesConfig; -import lombok.extern.slf4j.Slf4j; - import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; @Slf4j public class DataProductAuthorizationUtils { - private DataProductAuthorizationUtils() { + private DataProductAuthorizationUtils() {} - } - private static final 
ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDataProductsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_ENTITY_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -35,11 +37,14 @@ public static boolean isAuthorizedToUpdateDataProductsForEntity(@Nonnull QueryCo orPrivilegeGroups); } - public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext context, Urn domainUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToManageDataProducts( + @Nonnull QueryContext context, Urn domainUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.MANAGE_DATA_PRODUCTS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( 
context.getAuthorizer(), @@ -49,10 +54,10 @@ public static boolean isAuthorizedToManageDataProducts(@Nonnull QueryContext con orPrivilegeGroups); } - public static boolean isAuthorizedToEditDataProduct(@Nonnull QueryContext context, Urn dataProductUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP - )); + public static boolean isAuthorizedToEditDataProduct( + @Nonnull QueryContext context, Urn dataProductUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup(ImmutableList.of(ALL_PRIVILEGES_GROUP)); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java index fd31e2199c22a1..ea13f96cfc1bf1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/DeleteDataProductResolver.java @@ -9,11 +9,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class DeleteDataProductResolver implements DataFetcher> { @@ -21,32 +20,38 @@ public class DeleteDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn dataProductUrn = 
UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } - - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - } - - try { - _dataProductService.deleteDataProduct(dataProductUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Data Product", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new IllegalArgumentException("The Data Product provided dos not exist"); + } + + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + } + + try { + _dataProductService.deleteDataProduct(dataProductUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Data Product", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java index 831d449bef9ef6..a0f1698bf99e82 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/ListDataProductAssetsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -22,18 +25,14 @@ import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.buildFilterWithUrns; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** * Resolver responsible for getting the assets belonging to a Data Product. 
Get the assets from the @@ -41,7 +40,8 @@ */ @Slf4j @RequiredArgsConstructor -public class ListDataProductAssetsResolver implements DataFetcher> { +public class ListDataProductAssetsResolver + implements DataFetcher> { private static final int DEFAULT_START = 0; private static final int DEFAULT_COUNT = 10; @@ -52,7 +52,10 @@ public class ListDataProductAssetsResolver implements DataFetcher get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); // get urn from either input or source (in the case of "entities" field) - final String urn = environment.getArgument("urn") != null ? environment.getArgument("urn") : ((DataProduct) environment.getSource()).getUrn(); + final String urn = + environment.getArgument("urn") != null + ? environment.getArgument("urn") + : ((DataProduct) environment.getSource()).getUrn(); final Urn dataProductUrn = UrnUtils.getUrn(urn); final SearchAcrossEntitiesInput input = bindArgument(environment.getArgument("input"), SearchAcrossEntitiesInput.class); @@ -60,32 +63,52 @@ public CompletableFuture get(DataFetchingEnvironment environment) // 1. 
Get urns of assets belonging to Data Product using an aspect query List assetUrns = new ArrayList<>(); try { - final EntityResponse entityResponse = _entityClient.getV2( - Constants.DATA_PRODUCT_ENTITY_NAME, - dataProductUrn, - Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - if (entityResponse != null && entityResponse.getAspects().containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { - final DataMap data = entityResponse.getAspects().get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME).getValue().data(); + final EntityResponse entityResponse = + _entityClient.getV2( + Constants.DATA_PRODUCT_ENTITY_NAME, + dataProductUrn, + Collections.singleton(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse != null + && entityResponse + .getAspects() + .containsKey(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME)) { + final DataMap data = + entityResponse + .getAspects() + .get(Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); final DataProductProperties dataProductProperties = new DataProductProperties(data); if (dataProductProperties.hasAssets()) { - assetUrns.addAll(dataProductProperties.getAssets().stream().map(DataProductAssociation::getDestinationUrn).collect(Collectors.toList())); + assetUrns.addAll( + dataProductProperties.getAssets().stream() + .map(DataProductAssociation::getDestinationUrn) + .collect(Collectors.toList())); } } } catch (Exception e) { log.error(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); - throw new RuntimeException(String.format("Failed to list data product assets with urn %s", dataProductUrn), e); + throw new RuntimeException( + String.format("Failed to list data product assets with urn %s", dataProductUrn), e); } // 2. Get list of entities that we should query based on filters or assets from aspect. 
- List entitiesToQuery = assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); - - - final List inputEntityTypes = (input.getTypes() == null || input.getTypes().isEmpty()) ? ImmutableList.of() : input.getTypes(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).distinct().collect(Collectors.toList()); - - final List finalEntityNames = inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; + List entitiesToQuery = + assetUrns.stream().map(Urn::getEntityType).distinct().collect(Collectors.toList()); + + final List inputEntityTypes = + (input.getTypes() == null || input.getTypes().isEmpty()) + ? ImmutableList.of() + : input.getTypes(); + final List inputEntityNames = + inputEntityTypes.stream() + .map(EntityTypeMapper::getName) + .distinct() + .collect(Collectors.toList()); + + final List finalEntityNames = + inputEntityNames.size() > 0 ? inputEntityNames : entitiesToQuery; // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); @@ -93,49 +116,64 @@ public CompletableFuture get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? 
input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - // if no assets in data product properties, exit early before search and return empty results - if (assetUrns.size() == 0) { - SearchResults results = new SearchResults(); - results.setStart(start); - results.setCount(count); - results.setTotal(0); - results.setSearchResults(ImmutableList.of()); - return results; - } - - // add urns from the aspect to our filters - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); - - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } - - try { - log.debug( - "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - finalEntityNames, - sanitizedQuery, - finalFilter, - start, - count, - searchFlags, - null, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + // if no assets in data product properties, exit early before search and return empty + // results + if (assetUrns.size() == 0) { + SearchResults results = new SearchResults(); + results.setStart(start); 
+ results.setCount(count); + results.setTotal(0); + results.setSearchResults(ImmutableList.of()); + return results; + } + + // add urns from the aspect to our filters + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = buildFilterWithUrns(new HashSet<>(assetUrns), baseFilter); + + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } + + try { + log.debug( + "Executing search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + finalEntityNames, + sanitizedQuery, + finalFilter, + start, + count, + searchFlags, + null, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for data product assets: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java index 79afddbb873fbc..304ef96d90aa51 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataproduct/UpdateDataProductResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataproduct; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,13 +15,10 @@ import com.linkedin.metadata.service.DataProductService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - @Slf4j @RequiredArgsConstructor public class UpdateDataProductResolver implements DataFetcher> { @@ -27,43 +26,51 @@ public class UpdateDataProductResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateDataProductInput input = bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); + final UpdateDataProductInput input = + bindArgument(environment.getArgument("input"), UpdateDataProductInput.class); final Urn dataProductUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - if (!_dataProductService.verifyEntityExists(dataProductUrn, context.getAuthentication())) { - throw new IllegalArgumentException("The Data Product provided dos not exist"); - } + return CompletableFuture.supplyAsync( + () -> { + if (!_dataProductService.verifyEntityExists( + dataProductUrn, context.getAuthentication())) { + throw new 
IllegalArgumentException("The Data Product provided dos not exist"); + } - Domains domains = _dataProductService.getDataProductDomains(dataProductUrn, context.getAuthentication()); - if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { - // get first domain since we only allow one domain right now - Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); - if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - } + Domains domains = + _dataProductService.getDataProductDomains( + dataProductUrn, context.getAuthentication()); + if (domains != null && domains.hasDomains() && domains.getDomains().size() > 0) { + // get first domain since we only allow one domain right now + Urn domainUrn = UrnUtils.getUrn(domains.getDomains().get(0).toString()); + if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts( + context, domainUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + } - try { - final Urn urn = _dataProductService.updateDataProduct( - dataProductUrn, - input.getName(), - input.getDescription(), - authentication); - EntityResponse response = _dataProductService.getDataProductEntityResponse(urn, authentication); - if (response != null) { - return DataProductMapper.map(response); - } - // should never happen - log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); - return null; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); - } - }); + try { + final Urn urn = + _dataProductService.updateDataProduct( + dataProductUrn, input.getName(), input.getDescription(), authentication); + EntityResponse response = + _dataProductService.getDataProductEntityResponse(urn, authentication); + if (response != null) { + return DataProductMapper.map(response); + } + // should never happen + log.error(String.format("Unable to find data product with urn %s", dataProductUrn)); + return null; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update DataProduct with urn %s", dataProductUrn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java index 1587df4c9899b3..604c46a1f7c010 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolver.java @@ -39,13 +39,11 @@ import lombok.Data; import lombok.extern.slf4j.Slf4j; - /** * Resolver used for resolving the Health state of a Dataset. * - * Currently, the health status is calculated via the validation on a Dataset. 
If there are no validations found, the - * health status will be undefined for the Dataset. - * + *

Currently, the health status is calculated via the validation on a Dataset. If there are no + * validations found, the health status will be undefined for the Dataset. */ @Slf4j public class DatasetHealthResolver implements DataFetcher>> { @@ -60,47 +58,48 @@ public class DatasetHealthResolver implements DataFetcher _statusCache; public DatasetHealthResolver( - final GraphClient graphClient, - final TimeseriesAspectService timeseriesAspectService) { + final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService) { this(graphClient, timeseriesAspectService, new Config(true)); - } + public DatasetHealthResolver( final GraphClient graphClient, final TimeseriesAspectService timeseriesAspectService, final Config config) { _graphClient = graphClient; _timeseriesAspectService = timeseriesAspectService; - _statusCache = CacheBuilder.newBuilder() - .maximumSize(10000) - .expireAfterWrite(1, TimeUnit.MINUTES) - .build(); + _statusCache = + CacheBuilder.newBuilder().maximumSize(10000).expireAfterWrite(1, TimeUnit.MINUTES).build(); _config = config; } @Override - public CompletableFuture> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { final Dataset parent = environment.getSource(); - return CompletableFuture.supplyAsync(() -> { - try { - final CachedHealth cachedStatus = _statusCache.get(parent.getUrn(), () -> ( - computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); - return cachedStatus.healths; - } catch (Exception e) { - throw new RuntimeException("Failed to resolve dataset's health status.", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final CachedHealth cachedStatus = + _statusCache.get( + parent.getUrn(), + () -> + (computeHealthStatusForDataset(parent.getUrn(), environment.getContext()))); + return cachedStatus.healths; + } catch (Exception e) { + throw new RuntimeException("Failed 
to resolve dataset's health status.", e); + } + }); } /** * Computes the "resolved health status" for a Dataset by * - * - fetching active (non-deleted) assertions - * - fetching latest assertion run for each - * - checking whether any of the assertions latest runs are failing - * + *

- fetching active (non-deleted) assertions - fetching latest assertion run for each - + * checking whether any of the assertions latest runs are failing */ - private CachedHealth computeHealthStatusForDataset(final String datasetUrn, final QueryContext context) { + private CachedHealth computeHealthStatusForDataset( + final String datasetUrn, final QueryContext context) { final List healthStatuses = new ArrayList<>(); if (_config.getAssertionsEnabled()) { @@ -113,31 +112,33 @@ private CachedHealth computeHealthStatusForDataset(final String datasetUrn, fina } /** - * Returns the resolved "assertions health", which is currently a static function of whether the most recent run of - * all dataset assertions has succeeded. + * Returns the resolved "assertions health", which is currently a static function of whether the + * most recent run of all dataset assertions has succeeded. * * @param datasetUrn the dataset to compute health for * @param context the query context * @return an instance of {@link Health} for the Dataset, null if one cannot be computed. */ @Nullable - private Health computeAssertionHealthForDataset(final String datasetUrn, final QueryContext context) { + private Health computeAssertionHealthForDataset( + final String datasetUrn, final QueryContext context) { // Get active assertion urns - final EntityRelationships relationships = _graphClient.getRelatedEntities( - datasetUrn, - ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), - RelationshipDirection.INCOMING, - 0, - 500, - context.getActorUrn() - ); + final EntityRelationships relationships = + _graphClient.getRelatedEntities( + datasetUrn, + ImmutableList.of(ASSERTS_RELATIONSHIP_NAME), + RelationshipDirection.INCOMING, + 0, + 500, + context.getActorUrn()); if (relationships.getTotal() > 0) { // If there are assertions defined, then we should return a non-null health for this asset. 
- final Set activeAssertionUrns = relationships.getRelationships() - .stream() - .map(relationship -> relationship.getEntity().toString()).collect(Collectors.toSet()); + final Set activeAssertionUrns = + relationships.getRelationships().stream() + .map(relationship -> relationship.getEntity().toString()) + .collect(Collectors.toSet()); final GenericTable assertionRunResults = getAssertionRunsTable(datasetUrn); @@ -146,22 +147,24 @@ private Health computeAssertionHealthForDataset(final String datasetUrn, final Q return null; } - final List failingAssertionUrns = getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); + final List failingAssertionUrns = + getFailingAssertionUrns(assertionRunResults, activeAssertionUrns); // Finally compute & return the health. final Health health = new Health(); health.setType(HealthStatusType.ASSERTIONS); if (failingAssertionUrns.size() > 0) { health.setStatus(HealthStatus.FAIL); - health.setMessage(String.format("%s of %s assertions are failing", failingAssertionUrns.size(), - activeAssertionUrns.size())); + health.setMessage( + String.format( + "%s of %s assertions are failing", + failingAssertionUrns.size(), activeAssertionUrns.size())); health.setCauses(failingAssertionUrns); } else { health.setStatus(HealthStatus.PASS); health.setMessage("All assertions are passing"); } return health; - } return null; } @@ -175,7 +178,8 @@ private GenericTable getAssertionRunsTable(final String asserteeUrn) { createAssertionGroupingBuckets()); } - private List getFailingAssertionUrns(final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { + private List getFailingAssertionUrns( + final GenericTable assertionRunsResult, final Set candidateAssertionUrns) { // Create the buckets based on the result return resultToFailedAssertionUrns(assertionRunsResult.getRows(), candidateAssertionUrns); } @@ -191,12 +195,15 @@ private Filter createAssertionsFilter(final String datasetUrn) { // Add filter for result == result 
Criterion startTimeCriterion = - new Criterion().setField("status").setCondition(Condition.EQUAL).setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); + new Criterion() + .setField("status") + .setCondition(Condition.EQUAL) + .setValue(Constants.ASSERTION_RUN_EVENT_STATUS_COMPLETE); criteria.add(startTimeCriterion); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(criteria)) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(new CriterionArray(criteria))))); return filter; } @@ -205,31 +212,38 @@ private AggregationSpec[] createAssertionAggregationSpecs() { AggregationSpec resultTypeAggregation = new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("type"); AggregationSpec timestampAggregation = - new AggregationSpec().setAggregationType(AggregationType.LATEST).setFieldPath("timestampMillis"); - return new AggregationSpec[]{resultTypeAggregation, timestampAggregation}; + new AggregationSpec() + .setAggregationType(AggregationType.LATEST) + .setFieldPath("timestampMillis"); + return new AggregationSpec[] {resultTypeAggregation, timestampAggregation}; } private GroupingBucket[] createAssertionGroupingBuckets() { // String grouping bucket on "assertionUrn" GroupingBucket assertionUrnBucket = new GroupingBucket(); assertionUrnBucket.setKey("assertionUrn").setType(GroupingBucketType.STRING_GROUPING_BUCKET); - return new GroupingBucket[]{assertionUrnBucket}; + return new GroupingBucket[] {assertionUrnBucket}; } - private List resultToFailedAssertionUrns(final StringArrayArray rows, final Set activeAssertionUrns) { + private List resultToFailedAssertionUrns( + final StringArrayArray rows, final Set activeAssertionUrns) { final List failedAssertionUrns = new ArrayList<>(); for (StringArray row : rows) { // Result structure should be assertionUrn, event.result.type, timestampMillis if (row.size() != 3) { - throw new 
RuntimeException(String.format( - "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", row.size())); + throw new RuntimeException( + String.format( + "Failed to fetch assertion run events from Timeseries index! Expected row of size 3, found %s", + row.size())); } final String assertionUrn = row.get(0); final String resultType = row.get(1); - // If assertion is "active" (not deleted) & is failing, then we report a degradation in health. - if (activeAssertionUrns.contains(assertionUrn) && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { + // If assertion is "active" (not deleted) & is failing, then we report a degradation in + // health. + if (activeAssertionUrns.contains(assertionUrn) + && !ASSERTION_RUN_EVENT_SUCCESS_TYPE.equals(resultType)) { failedAssertionUrns.add(assertionUrn); } } @@ -246,4 +260,4 @@ public static class Config { private static class CachedHealth { private final List healths; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java index 2873866bb34f73..74fbd9c2c868a4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolver.java @@ -24,13 +24,13 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - /** * This resolver is a thin wrapper around the {@link DatasetUsageStatsResolver} which simply * computes some aggregate usage metrics for a Dashboard. 
*/ @Slf4j -public class DatasetStatsSummaryResolver implements DataFetcher> { +public class DatasetStatsSummaryResolver + implements DataFetcher> { // The maximum number of top users to show in the summary stats private static final Integer MAX_TOP_USERS = 5; @@ -40,53 +40,64 @@ public class DatasetStatsSummaryResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (this.summaryCache.getIfPresent(resourceUrn) != null) { - return this.summaryCache.getIfPresent(resourceUrn); - } + return CompletableFuture.supplyAsync( + () -> { + if (this.summaryCache.getIfPresent(resourceUrn) != null) { + return this.summaryCache.getIfPresent(resourceUrn); + } - try { + try { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view profile information for dataset {}", + if (!isAuthorized(resourceUrn, context)) { + log.debug( + "User {} is not authorized to view profile information for dataset {}", context.getActorUrn(), resourceUrn.toString()); - return null; - } - - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); - - final DatasetStatsSummary result = new DatasetStatsSummary(); - result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); - result.setUniqueUserCountLast30Days(usageQueryResult.getAggregations().getUniqueUserCount()); - if (usageQueryResult.getAggregations().hasUsers()) { - result.setTopUsersLast30Days(trimUsers(usageQueryResult.getAggregations().getUsers() - .stream() - .filter(UserUsageCounts::hasUser) - .sorted((a, b) -> (b.getCount() - a.getCount())) - .map(userCounts -> 
createPartialUser(Objects.requireNonNull(userCounts.getUser()))) - .collect(Collectors.toList()))); - } - this.summaryCache.put(resourceUrn, result); - return result; - } catch (Exception e) { - log.error(String.format("Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), e); - return null; // Do not throw when loading usage summary fails. - } - }); + return null; + } + + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats(resourceUrn.toString(), UsageTimeRange.MONTH); + + final DatasetStatsSummary result = new DatasetStatsSummary(); + result.setQueryCountLast30Days(usageQueryResult.getAggregations().getTotalSqlQueries()); + result.setUniqueUserCountLast30Days( + usageQueryResult.getAggregations().getUniqueUserCount()); + if (usageQueryResult.getAggregations().hasUsers()) { + result.setTopUsersLast30Days( + trimUsers( + usageQueryResult.getAggregations().getUsers().stream() + .filter(UserUsageCounts::hasUser) + .sorted((a, b) -> (b.getCount() - a.getCount())) + .map( + userCounts -> + createPartialUser(Objects.requireNonNull(userCounts.getUser()))) + .collect(Collectors.toList()))); + } + this.summaryCache.put(resourceUrn, result); + return result; + } catch (Exception e) { + log.error( + String.format( + "Failed to load Usage Stats summary for resource %s", resourceUrn.toString()), + e); + return null; // Do not throw when loading usage summary fails. 
+ } + }); } private List trimUsers(final List originalUsers) { @@ -103,8 +114,9 @@ private CorpUser createPartialUser(final Urn userUrn) { } private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, - Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), - PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), + PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java index e4bec8e896fdf7..75288ec989c79d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetUsageStatsResolver.java @@ -17,7 +17,6 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DatasetUsageStatsResolver implements DataFetcher> { @@ -28,30 +27,35 @@ public DatasetUsageStatsResolver(final UsageClient usageClient) { } @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); final UsageTimeRange range = UsageTimeRange.valueOf(environment.getArgument("range")); - return CompletableFuture.supplyAsync(() -> { - if (!isAuthorized(resourceUrn, context)) { - log.debug("User {} is not authorized to view usage information for dataset {}", - context.getActorUrn(), - 
resourceUrn.toString()); - return null; - } - try { - com.linkedin.usage.UsageQueryResult - usageQueryResult = usageClient.getUsageStats(resourceUrn.toString(), range); - return UsageQueryResultMapper.map(usageQueryResult); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorized(resourceUrn, context)) { + log.debug( + "User {} is not authorized to view usage information for dataset {}", + context.getActorUrn(), + resourceUrn.toString()); + return null; + } + try { + com.linkedin.usage.UsageQueryResult usageQueryResult = + usageClient.getUsageStats(resourceUrn.toString(), range); + return UsageQueryResultMapper.map(usageQueryResult); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load Usage Stats for resource %s", resourceUrn), e); + } + }); } private boolean isAuthorized(final Urn resourceUrn, final QueryContext context) { - return AuthorizationUtils.isAuthorized(context, + return AuthorizationUtils.isAuthorized( + context, Optional.of(new EntitySpec(resourceUrn.getEntityType(), resourceUrn.toString())), PoliciesConfig.VIEW_DATASET_USAGE_PRIVILEGE); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java index 75c09d0cf7e437..62c88c506ba613 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolver.java @@ -1,16 +1,20 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; 
+ +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.UpdateDeprecationInput; import com.linkedin.datahub.graphql.resolvers.AuthUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -23,13 +27,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor @@ -37,48 +37,61 @@ public class UpdateDeprecationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateDeprecationInput input = bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); + final UpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), UpdateDeprecationInput.class); final Urn entityUrn = Urn.createFromString(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateUpdateDeprecationInput( - entityUrn, - _entityService - ); - try { - Deprecation deprecation = (Deprecation) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DEPRECATION_ASPECT_NAME, - _entityService, - new Deprecation()); - updateDeprecation(deprecation, input, context); - - // Create the Deprecation aspect - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(entityUrn, DEPRECATION_ASPECT_NAME, deprecation); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to update Deprecation for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to update Deprecation for resource with entity urn %s", entityUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!isAuthorizedToUpdateDeprecationForEntity(environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateUpdateDeprecationInput(entityUrn, _entityService); + try { + Deprecation deprecation = + (Deprecation) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), + DEPRECATION_ASPECT_NAME, + _entityService, + new Deprecation()); + updateDeprecation(deprecation, input, context); + + // Create the Deprecation aspect + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + entityUrn, DEPRECATION_ASPECT_NAME, deprecation); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to update Deprecation for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to update Deprecation for resource with entity urn %s", entityUrn), + e); + } + }); } - private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext context, final Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToUpdateDeprecationForEntity( + final QueryContext context, final Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -88,20 +101,19 @@ private boolean isAuthorizedToUpdateDeprecationForEntity(final QueryContext cont orPrivilegeGroups); } - public static Boolean validateUpdateDeprecationInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUpdateDeprecationInput(Urn entityUrn, 
EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); + String.format( + "Failed to update deprecation for Entity %s. Entity does not exist.", entityUrn)); } return true; } - private static void updateDeprecation(Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { + private static void updateDeprecation( + Deprecation deprecation, UpdateDeprecationInput input, QueryContext context) { deprecation.setDeprecated(input.getDeprecated()); deprecation.setDecommissionTime(input.getDecommissionTime(), SetMode.REMOVE_IF_NULL); if (input.getNote() != null) { @@ -115,9 +127,10 @@ private static void updateDeprecation(Deprecation deprecation, UpdateDeprecation } catch (URISyntaxException e) { // Should never happen. throw new RuntimeException( - String.format("Failed to convert authorized actor into an Urn. actor urn: %s", - context.getActorUrn()), + String.format( + "Failed to convert authorized actor into an Urn. 
actor urn: %s", + context.getActorUrn()), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java index 1930cdc1f86676..9099394d32bd0b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -23,22 +28,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS privilege. + * Resolver used for creating a new Domain on DataHub. Requires the CREATE_DOMAINS or MANAGE_DOMAINS + * privilege. 
*/ @Slf4j @RequiredArgsConstructor @@ -51,71 +49,101 @@ public class CreateDomainResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateDomainInput input = bindArgument(environment.getArgument("input"), CreateDomainInput.class); - final Urn parentDomain = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canCreateDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - try { - // Create the Domain Key - final DomainKey key = new DomainKey(); - - // Take user provided id OR generate a random UUID for the domain. - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setId(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Domain already exists!"); - } - - if (parentDomain != null && !_entityClient.exists(parentDomain, context.getAuthentication())) { - throw new IllegalArgumentException("Parent Domain does not exist!"); - } - - if (DomainUtils.hasNameConflict(input.getName(), parentDomain, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. 
Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, DOMAIN_ENTITY_NAME, - DOMAIN_PROPERTIES_ASPECT_NAME, mapDomainProperties(input, context)); - proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); - - String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - OwnerUtils.addCreatorAsOwner(context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return domainUrn; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to create Domain with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Domain with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + final CreateDomainInput input = + bindArgument(environment.getArgument("input"), CreateDomainInput.class); + final Urn parentDomain = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + try { + // Create the Domain Key + final DomainKey key = new DomainKey(); + + // Take user provided id OR generate a random UUID for the domain. + final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setId(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, DOMAIN_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Domain already exists!"); + } + + if (parentDomain != null + && !_entityClient.exists(parentDomain, context.getAuthentication())) { + throw new IllegalArgumentException("Parent Domain does not exist!"); + } + + if (DomainUtils.hasNameConflict( + input.getName(), parentDomain, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in this domain. Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + DOMAIN_ENTITY_NAME, + DOMAIN_PROPERTIES_ASPECT_NAME, + mapDomainProperties(input, context)); + proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)); + + String domainUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + OwnerUtils.addCreatorAsOwner( + context, domainUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); + return domainUrn; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to create Domain with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Domain with id: %s, name: %s", + input.getId(), input.getName()), + e); + } + }); } - private DomainProperties mapDomainProperties(final CreateDomainInput input, final QueryContext context) { + private 
DomainProperties mapDomainProperties( + final CreateDomainInput input, final QueryContext context) { final DomainProperties result = new DomainProperties(); result.setName(input.getName()); result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - result.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + result.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); if (input.getParentDomain() != null) { try { result.setParentDomain(Urn.createFromString(input.getParentDomain())); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), e); + throw new RuntimeException( + String.format("Failed to create Domain Urn from string: %s", input.getParentDomain()), + e); } } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java index 9ab90e8b4ff72c..c863f2e581dcb4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolver.java @@ -11,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class DeleteDomainResolver implements DataFetcher> { @@ -25,37 +22,49 @@ public DeleteDomainResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public 
CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String domainUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(domainUrn); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageDomains(context) + || AuthorizationUtils.canDeleteEntity(urn, context)) { + try { + // Make sure there are no child domains + if (DomainUtils.hasChildDomains(urn, context, _entityClient)) { + throw new RuntimeException( + String.format("Cannot delete domain %s which has child domains", domainUrn)); + } - if (AuthorizationUtils.canManageDomains(context) || AuthorizationUtils.canDeleteEntity(urn, context)) { - try { - // Make sure there are no child domains - if (DomainUtils.hasChildDomains(urn, context, _entityClient)) { - throw new RuntimeException(String.format("Cannot delete domain %s which has child domains", domainUrn)); - } + _entityClient.deleteEntity(urn, context.getAuthentication()); + log.info( + String.format("I've successfully deleted the entity %s with urn", domainUrn)); - _entityClient.deleteEntity(urn, context.getAuthentication()); - log.info(String.format("I've successfully deleted the entity %s with urn", domainUrn)); + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Domain with urn %s", + urn), + e); + } + }); - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { - try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to 
clear all entity references for Domain with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against domain with urn %s", domainUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", domainUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java index 0bf551c4683e61..8f6d109e71b2c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Domain; import com.linkedin.datahub.graphql.generated.DomainEntitiesInput; @@ -19,13 +22,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolves the entities in a particular Domain. - */ +/** Resolves the entities in a particular Domain. 
*/ @Slf4j public class DomainEntitiesResolver implements DataFetcher> { @@ -49,50 +46,65 @@ public DomainEntitiesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urn = ((Domain) environment.getSource()).getUrn(); - final DomainEntitiesInput input = environment.getArgument(INPUT_ARG_NAME) != null - ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) - : DEFAULT_ENTITIES_INPUT; + final DomainEntitiesInput input = + environment.getArgument(INPUT_ARG_NAME) != null + ? bindArgument(environment.getArgument(INPUT_ARG_NAME), DomainEntitiesInput.class) + : DEFAULT_ENTITIES_INPUT; final String query = input.getQuery() != null ? input.getQuery() : DEFAULT_QUERY; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? 
input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - try { - - final CriterionArray criteria = new CriterionArray(); - final Criterion filterCriterion = new Criterion() - .setField(DOMAINS_FIELD_NAME + ".keyword") - .setCondition(Condition.EQUAL) - .setValue(urn); - criteria.add(filterCriterion); - if (input.getFilters() != null) { - input.getFilters().forEach(filter -> { - criteria.add(new Criterion().setField(filter.getField()).setValue(filter.getValue())); - }); - } - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - query, - new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(criteria))), - start, - count, - null, - null, - context.getAuthentication() - )); - - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve entities associated with Domain with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + + final CriterionArray criteria = new CriterionArray(); + final Criterion filterCriterion = + new Criterion() + .setField(DOMAINS_FIELD_NAME + ".keyword") + .setCondition(Condition.EQUAL) + .setValue(urn); + criteria.add(filterCriterion); + if (input.getFilters() != null) { + input + .getFilters() + .forEach( + filter -> { + criteria.add( + new Criterion() + .setField(filter.getField()) + .setValue(filter.getValue())); + }); + } + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + query, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion().setAnd(criteria))), + start, + count, + null, + null, + context.getAuthentication())); + + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to resolve entities associated with Domain with 
urn %s", urn), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java index 3a751e502eb10a..5453603f4cc9f9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,18 +21,14 @@ import com.linkedin.metadata.search.SearchResult; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Domains defined within DataHub. Requires the MANAGE_DOMAINS + * platform privilege. 
*/ public class ListDomainsResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -43,47 +42,56 @@ public ListDomainsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - final ListDomainsInput input = bindArgument(environment.getArgument("input"), ListDomainsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final Urn parentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; - final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); + return CompletableFuture.supplyAsync( + () -> { + final ListDomainsInput input = + bindArgument(environment.getArgument("input"), ListDomainsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final Urn parentDomainUrn = + input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Filter filter = DomainUtils.buildParentDomainFilter(parentDomainUrn); - try { - // First, get all domain Urns. 
- final SearchResult gmsResult = _entityClient.search( - Constants.DOMAIN_ENTITY_NAME, - query, - filter, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all domain Urns. + final SearchResult gmsResult = + _entityClient.search( + Constants.DOMAIN_ENTITY_NAME, + query, + filter, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. - final ListDomainsResult result = new ListDomainsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setDomains(mapUnresolvedDomains(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list domains", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListDomainsResult result = new ListDomainsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setDomains( + mapUnresolvedDomains( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list domains", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial Domain objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Domain objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedDomains(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java index dcaa7d61ed90cd..8406e198104689 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -9,51 +11,53 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; - public class ParentDomainsResolver implements DataFetcher> { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public ParentDomainsResolver(final EntityClient entityClient) { - _entityClient = entityClient; + public ParentDomainsResolver(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); + final List parentDomains = new ArrayList<>(); + final Set visitedParentUrns = new HashSet<>(); + + if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { + 
throw new IllegalArgumentException( + String.format("Failed to resolve parents for entity type %s", urn)); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final Urn urn = UrnUtils.getUrn(((Entity) environment.getSource()).getUrn()); - final List parentDomains = new ArrayList<>(); - final Set visitedParentUrns = new HashSet<>(); - - if (!DOMAIN_ENTITY_NAME.equals(urn.getEntityType())) { - throw new IllegalArgumentException(String.format("Failed to resolve parents for entity type %s", urn)); - } - - return CompletableFuture.supplyAsync(() -> { - try { - Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); - - while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { - parentDomains.add(parentDomain); - visitedParentUrns.add(parentDomain.getUrn()); - parentDomain = DomainUtils.getParentDomain(Urn.createFromString(parentDomain.getUrn()), context, _entityClient); - } - - final ParentDomainsResult result = new ParentDomainsResult(); - result.setCount(parentDomains.size()); - result.setDomains(parentDomains); - return result; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to load parent domains for entity %s", urn), e); + return CompletableFuture.supplyAsync( + () -> { + try { + Entity parentDomain = DomainUtils.getParentDomain(urn, context, _entityClient); + + while (parentDomain != null && !visitedParentUrns.contains(parentDomain.getUrn())) { + parentDomains.add(parentDomain); + visitedParentUrns.add(parentDomain.getUrn()); + parentDomain = + DomainUtils.getParentDomain( + Urn.createFromString(parentDomain.getUrn()), context, _entityClient); } + + final ParentDomainsResult result = new ParentDomainsResult(); + result.setCount(parentDomains.size()); + result.setDomains(parentDomains); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to load parent 
domains for entity %s", urn), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java index 56a76dcb1e07fa..1c52f707c61a4a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,19 +19,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor public class SetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,49 +38,56 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); final Urn domainUrn = Urn.createFromString(environment.getArgument("domainUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - validateSetDomainInput( - entityUrn, - domainUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - setDomain(domains, domainUrn); + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + validateSetDomainInput(entityUrn, domainUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + setDomain(domains, domainUrn); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set Domain to resource with entity urn {}, domain urn {}: {}", entityUrn, domainUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set Domain to resource with entity urn %s, domain urn %s", entityUrn, domainUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to set Domain to resource with entity urn {}, domain urn {}: {}", + entityUrn, + domainUrn, + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to set Domain to resource with entity urn %s, domain urn %s", + entityUrn, domainUrn), + e); + } + }); } public static Boolean validateSetDomainInput( - Urn entityUrn, - Urn domainUrn, - EntityService entityService - ) { + Urn entityUrn, Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Domain does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. 
Domain does not exist.", + entityUrn, domainUrn)); } if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( - String.format("Failed to add Entity %s to Domain %s. Entity does not exist.", entityUrn, domainUrn)); + String.format( + "Failed to add Entity %s to Domain %s. Entity does not exist.", + entityUrn, domainUrn)); } return true; @@ -90,4 +98,4 @@ private static void setDomain(Domains domains, Urn domainUrn) { newDomain.add(domainUrn); domains.setDomains(newDomain); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java index 01dd4f1254f8eb..b2a82ac7608d89 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,19 +20,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for removing the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor public class UnsetDomainResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -37,39 +38,40 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("entityUrn")); - return CompletableFuture.supplyAsync(() -> { - - if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(environment.getContext(), entityUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity( + environment.getContext(), entityUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - validateUnsetDomainInput( - entityUrn, - _entityService - ); - try { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - DOMAINS_ASPECT_NAME, - _entityService, - new Domains()); - unsetDomain(domains); + validateUnsetDomainInput(entityUrn, _entityService); + try { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), DOMAINS_ASPECT_NAME, _entityService, new Domains()); + unsetDomain(domains); - // Create the Domains aspects - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to unset Domains for resource with entity urn {}: {}", entityUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), e); - } - }); + // Create the Domains aspects + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, DOMAINS_ASPECT_NAME, domains); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error( + "Failed to unset Domains for resource with entity urn {}: {}", + entityUrn, + e.getMessage()); + throw new RuntimeException( + String.format("Failed to unset Domains for resource with entity urn %s", entityUrn), + e); + } + }); } - public static Boolean validateUnsetDomainInput( - Urn entityUrn, - EntityService entityService - ) { + public static Boolean validateUnsetDomainInput(Urn entityUrn, EntityService entityService) { if (!entityService.exists(entityUrn)) { throw new IllegalArgumentException( @@ -85,4 +87,4 @@ private static void unsetDomain(@Nonnull Domains domains) { } domains.getDomains().clear(); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java index dbaf6000477aa1..e1b264606074c6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Embed; import com.linkedin.common.urn.Urn; @@ -19,14 +23,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for updating the embed render URL for an asset. - */ +/** Resolver used for updating the embed render URL for an asset. 
*/ @Slf4j @RequiredArgsConstructor public class UpdateEmbedResolver implements DataFetcher> { @@ -37,62 +34,70 @@ public class UpdateEmbedResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateEmbedInput input = bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); + final UpdateEmbedInput input = + bindArgument(environment.getArgument("input"), UpdateEmbedInput.class); final Urn entityUrn = UrnUtils.getUrn(input.getUrn()); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + validateUpdateEmbedInput(input, _entityService); + try { + final Embed embed = + (Embed) + EntityUtils.getAspectFromEntity( + entityUrn.toString(), EMBED_ASPECT_NAME, _entityService, new Embed()); - if (!EmbedUtils.isAuthorizedToUpdateEmbedForEntity(entityUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - validateUpdateEmbedInput( - input, - _entityService - ); - try { - final Embed embed = (Embed) EntityUtils.getAspectFromEntity( - entityUrn.toString(), - EMBED_ASPECT_NAME, - _entityService, - new Embed()); + updateEmbed(embed, input); - updateEmbed(embed, input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed); - _entityService.ingestProposal( - proposal, - new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis()), - false - ); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Embed for to resource with entity urn %s", entityUrn), e); - } - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(entityUrn, EMBED_ASPECT_NAME, embed); + _entityService.ingestProposal( + proposal, + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis()), + false); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to update Embed for to resource with entity urn %s", entityUrn), + e); + } + }); } /** - * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link IllegalArgumentException} if the input - * is not valid. + * Validates an instance of {@link UpdateEmbedInput}, and throws an {@link + * IllegalArgumentException} if the input is not valid. * - * For an input to be valid, the target URN must exist. + *

For an input to be valid, the target URN must exist. * * @param input the input to validate * @param entityService an instance of {@link EntityService} used to validate the input. */ - private static void validateUpdateEmbedInput(@Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { + private static void validateUpdateEmbedInput( + @Nonnull final UpdateEmbedInput input, @Nonnull final EntityService entityService) { if (!entityService.exists(UrnUtils.getUrn(input.getUrn()))) { throw new IllegalArgumentException( - String.format("Failed to update embed for entity with urn %s. Entity does not exist!", input.getUrn())); + String.format( + "Failed to update embed for entity with urn %s. Entity does not exist!", + input.getUrn())); } } /** * Applies an instance of {@link UpdateEmbedInput} to a base instance of {@link Embed}. + * * @param embed an embed to update * @param input the updates to apply */ - private static void updateEmbed(@Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { + private static void updateEmbed( + @Nonnull final Embed embed, @Nonnull final UpdateEmbedInput input) { embed.setRenderUrl(input.getRenderUrl(), SetMode.IGNORE_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java index 613f97182c5dda..d2bd2f3fb8a17d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import 
com.linkedin.metadata.entity.EntityService; @@ -8,12 +10,7 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for returning whether an entity exists. - */ +/** Resolver responsible for returning whether an entity exists. */ public class EntityExistsResolver implements DataFetcher> { private final EntityService _entityService; @@ -22,7 +19,8 @@ public EntityExistsResolver(final EntityService entityService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { String entityUrnString = bindArgument(environment.getArgument("urn"), String.class); // resolver can be used as its own endpoint or when hydrating an entity if (entityUrnString == null && environment.getSource() != null) { @@ -31,12 +29,14 @@ public CompletableFuture get(final DataFetchingEnvironment environment) Objects.requireNonNull(entityUrnString, "Entity urn must not be null!"); final Urn entityUrn = Urn.createFromString(entityUrnString); - return CompletableFuture.supplyAsync(() -> { - try { - return _entityService.exists(entityUrn); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to check whether entity %s exists", entityUrn.toString())); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return _entityService.exists(entityUrn); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to check whether entity %s exists", entityUrn.toString())); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java index d8190a160f268d..751c6096de1a2d 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolver.java @@ -9,17 +9,16 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPrivileges; -import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.EmbedUtils; +import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.extern.slf4j.Slf4j; - import java.util.Collections; import java.util.concurrent.CompletableFuture; +import lombok.extern.slf4j.Slf4j; @Slf4j public class EntityPrivilegesResolver implements DataFetcher> { @@ -36,25 +35,28 @@ public CompletableFuture get(DataFetchingEnvironment environme final String urnString = ((Entity) environment.getSource()).getUrn(); final Urn urn = UrnUtils.getUrn(urnString); - return CompletableFuture.supplyAsync(() -> { - switch (urn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return getGlossaryTermPrivileges(urn, context); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return getGlossaryNodePrivileges(urn, context); - case Constants.DATASET_ENTITY_NAME: - return getDatasetPrivileges(urn, context); - case Constants.CHART_ENTITY_NAME: - return getChartPrivileges(urn, context); - case Constants.DASHBOARD_ENTITY_NAME: - return getDashboardPrivileges(urn, context); - case Constants.DATA_JOB_ENTITY_NAME: - return getDataJobPrivileges(urn, context); - default: - log.warn("Tried to get entity privileges for entity type {} but nothing is 
implemented for it yet", urn.getEntityType()); - return new EntityPrivileges(); - } - }); + return CompletableFuture.supplyAsync( + () -> { + switch (urn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return getGlossaryTermPrivileges(urn, context); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return getGlossaryNodePrivileges(urn, context); + case Constants.DATASET_ENTITY_NAME: + return getDatasetPrivileges(urn, context); + case Constants.CHART_ENTITY_NAME: + return getChartPrivileges(urn, context); + case Constants.DASHBOARD_ENTITY_NAME: + return getDashboardPrivileges(urn, context); + case Constants.DATA_JOB_ENTITY_NAME: + return getDataJobPrivileges(urn, context); + default: + log.warn( + "Tried to get entity privileges for entity type {} but nothing is implemented for it yet", + urn.getEntityType()); + return new EntityPrivileges(); + } + }); } private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext context) { @@ -66,7 +68,8 @@ private EntityPrivileges getGlossaryTermPrivileges(Urn termUrn, QueryContext con } Urn parentNodeUrn = GlossaryUtils.getParentUrn(termUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; @@ -80,25 +83,29 @@ private EntityPrivileges getGlossaryNodePrivileges(Urn nodeUrn, QueryContext con result.setCanManageChildren(true); return result; } - Boolean canManageChildren = GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); + Boolean canManageChildren = + GlossaryUtils.canManageChildrenEntities(context, nodeUrn, _entityClient); result.setCanManageChildren(canManageChildren); Urn parentNodeUrn = GlossaryUtils.getParentUrn(nodeUrn, context, _entityClient); if (parentNodeUrn != null) { - Boolean canManage = 
GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); + Boolean canManage = + GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient); result.setCanManageEntity(canManage); } return result; } private boolean canEditEntityLineage(Urn urn, QueryContext context) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup orPrivilegesGroup = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup orPrivilegesGroup = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java index 69b5b14edfbeeb..535dbbf70a4cbc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.linkedin.common.GlossaryTermUrnArray; import 
com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; @@ -9,22 +12,18 @@ import com.linkedin.datahub.graphql.generated.RelatedTermsInput; import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; +import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -36,70 +35,89 @@ public class AddRelatedTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - validateRelatedTermsInput(urn, termUrns); - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - 
urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - glossaryRelatedTerms = new GlossaryRelatedTerms(); - } - - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + validateRelatedTermsInput(urn, termUrns); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + glossaryRelatedTerms = new GlossaryRelatedTerms(); + } + + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + glossaryRelatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); + + return updateRelatedTerms( + termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); + } + } catch (Exception e) { + throw new RuntimeException( + 
String.format("Failed to add related terms to %s", input.getUrn()), e); } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - glossaryRelatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray()); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); - - return updateRelatedTerms(termUrns, existingTermUrns, urn, glossaryRelatedTerms, actor); } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add related terms to %s", input.getUrn()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } public Boolean validateRelatedTermsInput(Urn urn, List termUrns) { - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); } for (Urn termUrn : termUrns) { if (termUrn.equals(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. Tried to create related term with itself.", urn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. Tried to create related term with itself.", urn)); } else if (!termUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. 
%s is not a glossaryTerm.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s is not a glossaryTerm.", urn, termUrn)); } else if (!_entityService.exists(termUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", urn, termUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", urn, termUrn)); } } return true; } - private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray existingTermUrns, Urn urn, GlossaryRelatedTerms glossaryRelatedTerms, Urn actor) { + private Boolean updateRelatedTerms( + List termUrns, + GlossaryTermUrnArray existingTermUrns, + Urn urn, + GlossaryRelatedTerms glossaryRelatedTerms, + Urn actor) { List termsToAdd = new ArrayList<>(); for (Urn termUrn : termUrns) { if (existingTermUrns.stream().anyMatch(association -> association.equals(termUrn))) { @@ -117,7 +135,12 @@ private Boolean updateRelatedTerms(List termUrns, GlossaryTermUrnArray exis existingTermUrns.add(newUrn); } - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); return true; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index cc0ab4e03a4e8d..815b4662e1ed24 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; 
+import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -19,18 +24,11 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.UUID; import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,41 +41,67 @@ public class CreateGlossaryNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - try { - final GlossaryNodeKey key = new GlossaryNodeKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Node already exists!"); + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + try { + final GlossaryNodeKey key = new GlossaryNodeKey(); + + final String id = + input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_NODE_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Node already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_NODE_ENTITY_NAME, + GLOSSARY_NODE_INFO_ASPECT_NAME, + mapGlossaryNodeInfo(input)); + + String glossaryNodeUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryNodeUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryNodeUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryNode with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNode with 
id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, mapGlossaryNodeInfo(input)); - - String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } - - OwnerUtils.addCreatorAsOwner(context, glossaryNodeUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryNodeUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryNode with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryNode with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput input) { @@ -90,10 +114,12 @@ private GlossaryNodeInfo mapGlossaryNodeInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index ad69e0c5876e2c..90979fe918f71a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,9 +30,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import 
java.util.Collections; import java.util.HashMap; @@ -37,12 +39,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -57,42 +55,69 @@ public class CreateGlossaryTermResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateGlossaryEntityInput input = bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); - final Urn parentNode = input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; - - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { - // Ensure there isn't another glossary term with the same name at this level of the glossary - validateGlossaryTermName(parentNode, context, input.getName()); - try { - final GlossaryTermKey key = new GlossaryTermKey(); - - final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); - key.setName(id); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Glossary Term already exists!"); - } - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, mapGlossaryTermInfo(input)); - - String glossaryTermUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; + final CreateGlossaryEntityInput input = + bindArgument(environment.getArgument("input"), CreateGlossaryEntityInput.class); + final Urn parentNode = + input.getParentNode() != null ? UrnUtils.getUrn(input.getParentNode()) : null; + + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageChildrenEntities(context, parentNode, _entityClient)) { + // Ensure there isn't another glossary term with the same name at this level of the + // glossary + validateGlossaryTermName(parentNode, context, input.getName()); + try { + final GlossaryTermKey key = new GlossaryTermKey(); + + final String id = + input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, GLOSSARY_TERM_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Glossary Term already exists!"); + } + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + GLOSSARY_TERM_ENTITY_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + mapGlossaryTermInfo(input)); + + String glossaryTermUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } + + OwnerUtils.addCreatorAsOwner( + context, + glossaryTermUrn, + OwnerEntityType.CORP_USER, + ownershipType, + _entityService); + return glossaryTermUrn; + } catch (Exception e) { + log.error( + "Failed to create GlossaryTerm with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create GlossaryTerm with id: %s, name: %s", + input.getId(), input.getName()), + e); + } } - - OwnerUtils.addCreatorAsOwner(context, glossaryTermUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return glossaryTermUrn; - } catch (Exception e) { - log.error("Failed to create GlossaryTerm with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create GlossaryTerm with id: %s, name: %s", input.getId(), input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput input) { @@ -106,7 +131,10 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp final GlossaryNodeUrn parentNode = GlossaryNodeUrn.createFromString(input.getParentNode()); result.setParentNode(parentNode, SetMode.IGNORE_NULL); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), e); + throw new RuntimeException( + String.format( + "Failed to create GlossaryNodeUrn from string: %s", input.getParentNode()), + e); } } return result; @@ -114,25 +142,22 @@ private GlossaryTermInfo mapGlossaryTermInfo(final CreateGlossaryEntityInput inp private Filter buildParentNodeFilter(final Urn parentNodeUrn) { final Map criterionMap = new HashMap<>(); - criterionMap.put(PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? null : parentNodeUrn.toString()); + criterionMap.put( + PARENT_NODE_INDEX_FIELD_NAME, parentNodeUrn == null ? 
null : parentNodeUrn.toString()); return QueryUtils.newFilter(criterionMap); } private Map getTermsWithSameParent(Urn parentNode, QueryContext context) { try { final Filter filter = buildParentNodeFilter(parentNode); - final SearchResult searchResult = _entityClient.filter( - GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + _entityClient.filter( + GLOSSARY_TERM_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final List termUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List termUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); return _entityClient.batchGetV2( GLOSSARY_TERM_ENTITY_NAME, @@ -147,14 +172,17 @@ private Map getTermsWithSameParent(Urn parentNode, QueryCon private void validateGlossaryTermName(Urn parentNode, QueryContext context, String name) { Map entities = getTermsWithSameParent(parentNode, context); - entities.forEach((urn, entityResponse) -> { - if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); - GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); - if (termInfo.hasName() && termInfo.getName().equals(name)) { - throw new IllegalArgumentException("Glossary Term with this name already exists at this level of the Business Glossary"); - } - } - }); + entities.forEach( + (urn, entityResponse) -> { + if (entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); + if (termInfo.hasName() && termInfo.getName().equals(name)) { + throw new IllegalArgumentException( + "Glossary Term with this name already exists at 
this level of the Business Glossary"); + } + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java index 0929c7138528da..f623f0e34b3669 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolver.java @@ -11,50 +11,59 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - @Slf4j public class DeleteGlossaryEntityResolver implements DataFetcher> { private final EntityClient _entityClient; private final EntityService _entityService; - public DeleteGlossaryEntityResolver(final EntityClient entityClient, EntityService entityService) { + public DeleteGlossaryEntityResolver( + final EntityClient entityClient, EntityService entityService) { _entityClient = entityClient; _entityService = entityService; } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(environment.getArgument("urn")); final Urn parentNodeUrn = GlossaryUtils.getParentUrn(entityUrn, context, _entityClient); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { - if (!_entityService.exists(entityUrn)) { - throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); - } - - try { - _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if 
(GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + if (!_entityService.exists(entityUrn)) { + throw new RuntimeException(String.format("This urn does not exist: %s", entityUrn)); + } - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { try { - _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + _entityClient.deleteEntity(entityUrn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(entityUrn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for glossary entity with urn %s", + entityUrn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for glossary entity with urn %s", entityUrn), e); + throw new RuntimeException( + String.format( + "Failed to perform delete against glossary entity with urn %s", entityUrn), + e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against glossary entity with urn %s", entityUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java index 1457a308c8774f..e7990b1a343d83 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryNodesResolver implements DataFetcher> { +public class GetRootGlossaryNodesResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryNodesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = 
input.getStart(); - final Integer count = input.getCount(); - - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsNodesResult = _entityClient.filter( - Constants.GLOSSARY_NODE_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); - - final List glossaryNodeUrns = gmsNodesResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); - - final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); - result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); - result.setCount(glossaryNodeUrns.size()); - result.setStart(gmsNodesResult.getFrom()); - result.setTotal(gmsNodesResult.getNumEntities()); - - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); + + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsNodesResult = + _entityClient.filter( + Constants.GLOSSARY_NODE_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); + + final List glossaryNodeUrns = + gmsNodesResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); + + final GetRootGlossaryNodesResult result = new GetRootGlossaryNodesResult(); + result.setNodes(mapUnresolvedGlossaryNodes(glossaryNodeUrns)); + result.setCount(glossaryNodeUrns.size()); + result.setStart(gmsNodesResult.getFrom()); + result.setTotal(gmsNodesResult.getNumEntities()); + + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary nodes from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() 
{ - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } @@ -101,4 +103,3 @@ private List mapUnresolvedGlossaryNodes(final List entityUrns return results; } } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java index f7684e477f8307..40e4363dcff938 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,15 +22,13 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - -public class GetRootGlossaryTermsResolver implements DataFetcher> { 
+public class GetRootGlossaryTermsResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -37,56 +37,58 @@ public GetRootGlossaryTermsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - final GetRootGlossaryEntitiesInput input = bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); - final Integer start = input.getStart(); - final Integer count = input.getCount(); + return CompletableFuture.supplyAsync( + () -> { + final GetRootGlossaryEntitiesInput input = + bindArgument(environment.getArgument("input"), GetRootGlossaryEntitiesInput.class); + final Integer start = input.getStart(); + final Integer count = input.getCount(); - try { - final Filter filter = buildGlossaryEntitiesFilter(); - final SearchResult gmsTermsResult = _entityClient.filter( - Constants.GLOSSARY_TERM_ENTITY_NAME, - filter, - null, - start, - count, - context.getAuthentication()); + try { + final Filter filter = buildGlossaryEntitiesFilter(); + final SearchResult gmsTermsResult = + _entityClient.filter( + Constants.GLOSSARY_TERM_ENTITY_NAME, + filter, + null, + start, + count, + context.getAuthentication()); - final List glossaryTermUrns = gmsTermsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + final List glossaryTermUrns = + gmsTermsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); - result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); - result.setCount(glossaryTermUrns.size()); - result.setStart(gmsTermsResult.getFrom()); - 
result.setTotal(gmsTermsResult.getNumEntities()); + final GetRootGlossaryTermsResult result = new GetRootGlossaryTermsResult(); + result.setTerms(mapUnresolvedGlossaryTerms(glossaryTermUrns)); + result.setCount(glossaryTermUrns.size()); + result.setStart(gmsTermsResult.getFrom()); + result.setTotal(gmsTermsResult.getNumEntities()); - return result; - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); - } - }); + return result; + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve root glossary terms from GMS", e); + } + }); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java index d513d70f39f589..850469f9965157 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static 
com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; @@ -14,18 +18,13 @@ import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; - -public class ParentNodesResolver implements DataFetcher> { +public class ParentNodesResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -36,19 +35,23 @@ public ParentNodesResolver(final EntityClient entityClient) { private void aggregateParentNodes(List nodes, String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_NODE_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data(); GlossaryNodeInfo nodeInfo = 
new GlossaryNodeInfo(dataMap); if (nodeInfo.hasParentNode()) { Urn parentNodeUrn = nodeInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); nodes.add(mappedNode); @@ -64,19 +67,23 @@ private void aggregateParentNodes(List nodes, String urn, QueryCon private GlossaryNode getTermParentNode(String urn, QueryContext context) { try { Urn entityUrn = new Urn(urn); - EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), - context.getAuthentication() - ); + EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); - if (entityResponse != null && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); + if (entityResponse != null + && entityResponse.getAspects().containsKey(GLOSSARY_TERM_INFO_ASPECT_NAME)) { + DataMap dataMap = + entityResponse.getAspects().get(GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data(); GlossaryTermInfo termInfo = new GlossaryTermInfo(dataMap); if (termInfo.hasParentNode()) { Urn parentNodeUrn = termInfo.getParentNode(); - EntityResponse response = _entityClient.getV2(parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); + EntityResponse response = + _entityClient.getV2( + parentNodeUrn.getEntityType(), parentNodeUrn, null, context.getAuthentication()); if (response != null) { GlossaryNode mappedNode = GlossaryNodeMapper.map(response); return mappedNode; @@ -95,27 
+102,28 @@ public CompletableFuture get(DataFetchingEnvironment environm final String urn = ((Entity) environment.getSource()).getUrn(); final List nodes = new ArrayList<>(); - return CompletableFuture.supplyAsync(() -> { - try { - final String type = Urn.createFromString(urn).getEntityType(); + return CompletableFuture.supplyAsync( + () -> { + try { + final String type = Urn.createFromString(urn).getEntityType(); - if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { - final GlossaryNode parentNode = getTermParentNode(urn, context); - if (parentNode != null) { - nodes.add(parentNode); - aggregateParentNodes(nodes, parentNode.getUrn(), context); - } - } else { - aggregateParentNodes(nodes, urn, context); - } + if (GLOSSARY_TERM_ENTITY_NAME.equals(type)) { + final GlossaryNode parentNode = getTermParentNode(urn, context); + if (parentNode != null) { + nodes.add(parentNode); + aggregateParentNodes(nodes, parentNode.getUrn(), context); + } + } else { + aggregateParentNodes(nodes, urn, context); + } - final ParentNodesResult result = new ParentNodesResult(); - result.setCount(nodes.size()); - result.setNodes(nodes); - return result; - } catch (DataHubGraphQLException | URISyntaxException e) { - throw new RuntimeException(("Failed to load parent nodes")); - } - }); + final ParentNodesResult result = new ParentNodesResult(); + result.setCount(nodes.size()); + result.setNodes(nodes); + return result; + } catch (DataHubGraphQLException | URISyntaxException e) { + throw new RuntimeException(("Failed to load parent nodes")); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java index 417ef4292d0f7d..8c9b792b74e0dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,15 +17,11 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -34,57 +33,82 @@ public class RemoveRelatedTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final RelatedTermsInput input = bindArgument(environment.getArgument("input"), RelatedTermsInput.class); + final RelatedTermsInput input = + bindArgument(environment.getArgument("input"), RelatedTermsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (GlossaryUtils.canManageGlossaries(context)) { - try { - final TermRelationshipType relationshipType = input.getRelationshipType(); - final Urn urn = Urn.createFromString(input.getUrn()); - final List termUrnsToRemove = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + return CompletableFuture.supplyAsync( + () -> { + if (GlossaryUtils.canManageGlossaries(context)) { + try { + final 
TermRelationshipType relationshipType = input.getRelationshipType(); + final Urn urn = Urn.createFromString(input.getUrn()); + final List termUrnsToRemove = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) || !_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryTerm.", urn, urn)); - } + if (!urn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || !_entityService.exists(urn)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryTerm.", + urn, urn)); + } - Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); + Urn actor = Urn.createFromString(((QueryContext) context).getActorUrn()); - GlossaryRelatedTerms glossaryRelatedTerms = (GlossaryRelatedTerms) EntityUtils.getAspectFromEntity( - urn.toString(), - Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, - _entityService, - null - ); - if (glossaryRelatedTerms == null) { - throw new RuntimeException(String.format("Related Terms for this Urn do not exist: %s", urn)); - } + GlossaryRelatedTerms glossaryRelatedTerms = + (GlossaryRelatedTerms) + EntityUtils.getAspectFromEntity( + urn.toString(), + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + _entityService, + null); + if (glossaryRelatedTerms == null) { + throw new RuntimeException( + String.format("Related Terms for this Urn do not exist: %s", urn)); + } - if (relationshipType == TermRelationshipType.isA) { - if (!glossaryRelatedTerms.hasIsRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getIsRelatedTerms(); + if (relationshipType == TermRelationshipType.isA) { + if (!glossaryRelatedTerms.hasIsRelatedTerms()) { + 
throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getIsRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; - } else { - if (!glossaryRelatedTerms.hasHasRelatedTerms()) { - throw new RuntimeException("Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); - } - final GlossaryTermUrnArray existingTermUrns = glossaryRelatedTerms.getHasRelatedTerms(); + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } else { + if (!glossaryRelatedTerms.hasHasRelatedTerms()) { + throw new RuntimeException( + "Failed to remove from GlossaryRelatedTerms as they do not exist for this Glossary Term"); + } + final GlossaryTermUrnArray existingTermUrns = + glossaryRelatedTerms.getHasRelatedTerms(); - existingTermUrns.removeIf(termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); - persistAspect(urn, Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, glossaryRelatedTerms, actor, _entityService); - return true; + existingTermUrns.removeIf( + termUrn -> termUrnsToRemove.stream().anyMatch(termUrn::equals)); + persistAspect( + urn, + Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME, + glossaryRelatedTerms, + actor, + _entityService); + return true; + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to removes related terms from %s", input.getUrn()), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to removes related terms from %s", input.getUrn()), e); - } - } - throw new 
AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java index daff0962bc2e81..acfc2cd14f8d4d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -17,13 +20,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver that adds a set of native members to a group, if the user and group both exist. - */ +/** Resolver that adds a set of native members to a group, if the user and group both exist. 
*/ public class AddGroupMembersResolver implements DataFetcher> { private final GroupService _groupService; @@ -33,9 +30,11 @@ public AddGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { - final AddGroupMembersInput input = bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); + final AddGroupMembersInput input = + bindArgument(environment.getArgument("input"), AddGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -52,30 +51,37 @@ public CompletableFuture get(final DataFetchingEnvironment environment) String.format("Failed to add members to group %s. Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership for group %s when adding group members", groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually added to it", - groupUrnStr)); - } + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), 
context.getAuthentication()); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership for group %s when adding group members", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually added to it", + groupUrnStr)); + } - try { - // Add each user to the group - final List userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - userUrnList.forEach(userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to add group members to group %s", groupUrnStr)); - } - }); + try { + // Add each user to the group + final List userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + userUrnList.forEach( + userUrn -> _groupService.addUserToNativeGroup(userUrn, groupUrn, authentication)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to add group members to group %s", groupUrnStr)); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java index 75f2a61287ecc4..e487ee00608d4a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import 
com.linkedin.datahub.graphql.QueryContext; @@ -12,10 +14,8 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -// Currently, this resolver will override the group details, but not group membership, if a group with the same name already exists. +// Currently, this resolver will override the group details, but not group membership, if a group +// with the same name already exists. public class CreateGroupResolver implements DataFetcher> { private final GroupService _groupService; @@ -33,19 +33,22 @@ public CompletableFuture get(final DataFetchingEnvironment environment) throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - final CreateGroupInput input = bindArgument(environment.getArgument("input"), CreateGroupInput.class); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, check if the group already exists. - // Create the Group key. - final CorpGroupKey key = new CorpGroupKey(); - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". - return _groupService.createNativeGroup(key, input.getName(), input.getDescription(), authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create group", e); - } - }); + final CreateGroupInput input = + bindArgument(environment.getArgument("input"), CreateGroupInput.class); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, check if the group already exists. + // Create the Group key. + final CorpGroupKey key = new CorpGroupKey(); + final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); + key.setName(id); // 'name' in the key really reflects nothing more than a stable "id". 
+ return _groupService.createNativeGroup( + key, input.getName(), input.getDescription(), authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create group", e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java index d0874b21fb1062..93582fb956bd8d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/EntityCountsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityCountInput; import com.linkedin.datahub.graphql.generated.EntityCountResult; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class EntityCountsResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -27,31 +26,42 @@ public EntityCountsResolver(final EntityClient entityClient) { @Override @WithSpan - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final EntityCountInput input = bindArgument(environment.getArgument("input"), EntityCountInput.class); - final EntityCountResults results = new EntityCountResults(); - - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all counts - Map gmsResult = _entityClient.batchGetTotalEntityCount( - 
input.getTypes().stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), context.getAuthentication()); - - // bind to a result. - List resultList = gmsResult.entrySet().stream().map(entry -> { - EntityCountResult result = new EntityCountResult(); - result.setCount(Math.toIntExact(entry.getValue())); - result.setEntityType(EntityTypeMapper.getType(entry.getKey())); - return result; - }).collect(Collectors.toList()); - results.setCounts(resultList); - return results; - } catch (Exception e) { - throw new RuntimeException("Failed to get entity counts", e); - } - }); + final EntityCountInput input = + bindArgument(environment.getArgument("input"), EntityCountInput.class); + final EntityCountResults results = new EntityCountResults(); + + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all counts + Map gmsResult = + _entityClient.batchGetTotalEntityCount( + input.getTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + context.getAuthentication()); + + // bind to a result. 
+ List resultList = + gmsResult.entrySet().stream() + .map( + entry -> { + EntityCountResult result = new EntityCountResult(); + result.setCount(Math.toIntExact(entry.getValue())); + result.setEntityType(EntityTypeMapper.getType(entry.getKey())); + return result; + }) + .collect(Collectors.toList()); + results.setCounts(resultList); + return results; + } catch (Exception e) { + throw new RuntimeException("Failed to get entity counts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java index 67cc84a33a9541..a6ad8698679f02 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/ListGroupsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -24,10 +27,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListGroupsResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -41,51 +40,68 @@ public ListGroupsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if 
(AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListGroupsInput input = bindArgument(environment.getArgument("input"), ListGroupsInput.class); + final ListGroupsInput input = + bindArgument(environment.getArgument("input"), ListGroupsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all group Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_GROUP_ENTITY_NAME, + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all group Urns. + final SearchResult gmsResult = + _entityClient.search( + CORP_GROUP_ENTITY_NAME, query, null, - new SortCriterion().setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, count, context.getAuthentication(), + new SortCriterion() + .setField(CORP_GROUP_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), new SearchFlags().setFulltext(true)); - // Then, get hydrate all groups. - final Map entities = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), null, context.getAuthentication()); + // Then, get hydrate all groups. + final Map entities = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - // Now that we have entities we can bind this to a result. 
- final ListGroupsResult result = new ListGroupsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setGroups(mapUnresolvedGroups(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list groups", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListGroupsResult result = new ListGroupsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setGroups( + mapUnresolvedGroups( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list groups", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - // This method maps urns returned from the list endpoint into Partial Group objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Group objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedGroups(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java index 287b4aa7b5dbd7..9fb63b3eb463d5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -17,10 +20,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class RemoveGroupMembersResolver implements DataFetcher> { private final GroupService _groupService; @@ -30,9 +29,11 @@ public RemoveGroupMembersResolver(final GroupService groupService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { - final RemoveGroupMembersInput input = bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); + final RemoveGroupMembersInput input = + bindArgument(environment.getArgument("input"), RemoveGroupMembersInput.class); final String groupUrnStr = input.getGroupUrn(); final QueryContext context = 
environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -43,37 +44,42 @@ public CompletableFuture get(final DataFetchingEnvironment environment) } final Urn groupUrn = Urn.createFromString(groupUrnStr); - final List userUrnList = input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + final List userUrnList = + input.getUserUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); if (!_groupService.groupExists(groupUrn)) { // The group doesn't exist. throw new DataHubGraphQLException( - String.format("Failed to add remove members from group %s. Group does not exist.", groupUrnStr), + String.format( + "Failed to add remove members from group %s. Group does not exist.", groupUrnStr), DataHubGraphQLErrorCode.NOT_FOUND); } - return CompletableFuture.supplyAsync(() -> { - Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); - if (groupOrigin == null || !groupOrigin.hasType()) { - try { - _groupService.migrateGroupMembershipToNativeGroupMembership(groupUrn, context.getActorUrn(), - context.getAuthentication()); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to migrate group membership when removing group members from group %s", - groupUrnStr)); - } - } else if (groupOrigin.getType() == OriginType.EXTERNAL) { - throw new RuntimeException(String.format( - "Group %s was ingested from an external provider and cannot have members manually removed from it", - groupUrnStr)); - } - try { - _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + Origin groupOrigin = _groupService.getGroupOrigin(groupUrn); + if (groupOrigin == null || !groupOrigin.hasType()) { + try { + _groupService.migrateGroupMembershipToNativeGroupMembership( + groupUrn, context.getActorUrn(), context.getAuthentication()); + } 
catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to migrate group membership when removing group members from group %s", + groupUrnStr)); + } + } else if (groupOrigin.getType() == OriginType.EXTERNAL) { + throw new RuntimeException( + String.format( + "Group %s was ingested from an external provider and cannot have members manually removed from it", + groupUrnStr)); + } + try { + _groupService.removeExistingNativeGroupMembers(groupUrn, userUrnList, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException(e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java index 99481868e30cee..e69d6b471f3c51 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupResolver.java @@ -10,10 +10,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class RemoveGroupResolver implements DataFetcher> { @@ -24,30 +21,39 @@ public RemoveGroupResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String groupUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(groupUrn); - return CompletableFuture.supplyAsync(() -> { - try { - 
_entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for group with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for group with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against group with urn %s", groupUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against group with urn %s", groupUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java index 6a4af7563a8d8e..036780d4467013 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtils.java @@ -1,25 +1,30 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class IngestionAuthUtils { public static boolean canManageIngestion(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + ImmutableList.of(PoliciesConfig.MANAGE_INGESTION_PRIVILEGE.getType()), + authorizer); } public static boolean canManageSecrets(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, ImmutableList.of(PoliciesConfig.MANAGE_SECRETS_PRIVILEGE.getType()), authorizer); } - private IngestionAuthUtils() { } + private IngestionAuthUtils() {} } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java index 1140c031f1d355..ffa9dcf42d176f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionResolverUtils.java @@ -25,11 +25,11 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; - @Slf4j public class IngestionResolverUtils { - public static List mapExecutionRequests(final Collection requests) { + public static List mapExecutionRequests( + final Collection requests) { List result = new ArrayList<>(); for (final EntityResponse request : requests) { result.add(mapExecutionRequest(request)); @@ -46,10 +46,13 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe result.setId(entityUrn.getId()); // Map input aspect. Must be present. 
- final EnvelopedAspect envelopedInput = aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); + final EnvelopedAspect envelopedInput = + aspects.get(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME); if (envelopedInput != null) { - final ExecutionRequestInput executionRequestInput = new ExecutionRequestInput(envelopedInput.getValue().data()); - final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); + final ExecutionRequestInput executionRequestInput = + new ExecutionRequestInput(envelopedInput.getValue().data()); + final com.linkedin.datahub.graphql.generated.ExecutionRequestInput inputResult = + new com.linkedin.datahub.graphql.generated.ExecutionRequestInput(); inputResult.setTask(executionRequestInput.getTask()); if (executionRequestInput.hasSource()) { @@ -63,23 +66,29 @@ public static ExecutionRequest mapExecutionRequest(final EntityResponse entityRe } // Map result aspect. Optional. - final EnvelopedAspect envelopedResult = aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); + final EnvelopedAspect envelopedResult = + aspects.get(Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME); if (envelopedResult != null) { - final ExecutionRequestResult executionRequestResult = new ExecutionRequestResult(envelopedResult.getValue().data()); + final ExecutionRequestResult executionRequestResult = + new ExecutionRequestResult(envelopedResult.getValue().data()); result.setResult(mapExecutionRequestResult(executionRequestResult)); } return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource mapExecutionRequestSource(final ExecutionRequestSource execRequestSource) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestSource + mapExecutionRequestSource(final 
ExecutionRequestSource execRequestSource) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestSource result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestSource(); result.setType(execRequestSource.getType()); return result; } - public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { - final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); + public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult + mapExecutionRequestResult(final ExecutionRequestResult execRequestResult) { + final com.linkedin.datahub.graphql.generated.ExecutionRequestResult result = + new com.linkedin.datahub.graphql.generated.ExecutionRequestResult(); result.setStatus(execRequestResult.getStatus()); result.setStartTimeMs(execRequestResult.getStartTimeMs()); result.setDurationMs(execRequestResult.getDurationMs()); @@ -90,7 +99,8 @@ public static com.linkedin.datahub.graphql.generated.ExecutionRequestResult mapE return result; } - public static StructuredReport mapStructuredReport(final StructuredExecutionReport structuredReport) { + public static StructuredReport mapStructuredReport( + final StructuredExecutionReport structuredReport) { StructuredReport structuredReportResult = new StructuredReport(); structuredReportResult.setType(structuredReport.getType()); structuredReportResult.setSerializedValue(structuredReport.getSerializedValue()); @@ -98,7 +108,8 @@ public static StructuredReport mapStructuredReport(final StructuredExecutionRepo return structuredReportResult; } - public static List mapIngestionSources(final Collection entities) { + public static List mapIngestionSources( + final Collection entities) { final List results = new ArrayList<>(); for (EntityResponse response : entities) { try { @@ -118,16 +129,19 @@ public static IngestionSource 
mapIngestionSource(final EntityResponse ingestionS final EnvelopedAspect envelopedInfo = aspects.get(Constants.INGESTION_INFO_ASPECT_NAME); if (envelopedInfo == null) { - throw new IllegalStateException("No ingestion source info aspect exists for urn: " + entityUrn); + throw new IllegalStateException( + "No ingestion source info aspect exists for urn: " + entityUrn); } // Bind into a strongly typed object. - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); return mapIngestionSourceInfo(entityUrn, ingestionSourceInfo); } - public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHubIngestionSourceInfo info) { + public static IngestionSource mapIngestionSourceInfo( + final Urn urn, final DataHubIngestionSourceInfo info) { final IngestionSource result = new IngestionSource(); result.setUrn(urn.toString()); result.setName(info.getName()); @@ -139,29 +153,30 @@ public static IngestionSource mapIngestionSourceInfo(final Urn urn, final DataHu return result; } - public static IngestionConfig mapIngestionSourceConfig(final DataHubIngestionSourceConfig config) { + public static IngestionConfig mapIngestionSourceConfig( + final DataHubIngestionSourceConfig config) { final IngestionConfig result = new IngestionConfig(); result.setRecipe(config.getRecipe()); result.setVersion(config.getVersion()); result.setExecutorId(config.getExecutorId()); result.setDebugMode(config.isDebugMode()); if (config.getExtraArgs() != null) { - List extraArgs = config.getExtraArgs() - .keySet() - .stream() - .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) - .collect(Collectors.toList()); + List extraArgs = + config.getExtraArgs().keySet().stream() + .map(key -> new StringMapEntry(key, config.getExtraArgs().get(key))) + .collect(Collectors.toList()); 
result.setExtraArgs(extraArgs); } return result; } - public static IngestionSchedule mapIngestionSourceSchedule(final DataHubIngestionSourceSchedule schedule) { + public static IngestionSchedule mapIngestionSourceSchedule( + final DataHubIngestionSourceSchedule schedule) { final IngestionSchedule result = new IngestionSchedule(); result.setInterval(schedule.getInterval()); result.setTimezone(schedule.getTimezone()); return result; } - private IngestionResolverUtils() { } + private IngestionResolverUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java index 7f9cb6176989f8..e346f2b077c985 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -22,15 +26,9 @@ import java.util.Map; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Cancels a requested ingestion execution by emitting a KILL signal. 
- */ -public class CancelIngestionExecutionRequestResolver implements DataFetcher> { +/** Cancels a requested ingestion execution by emitting a KILL signal. */ +public class CancelIngestionExecutionRequestResolver + implements DataFetcher> { private static final String KILL_EXECUTION_REQUEST_SIGNAL = "KILL"; @@ -44,45 +42,58 @@ public CancelIngestionExecutionRequestResolver(final EntityClient entityClient) public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { - if (IngestionAuthUtils.canManageIngestion(context)) { + final CancelIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); - final CancelIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CancelIngestionExecutionRequestInput.class); + try { + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); - try { - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", ingestionSourceUrn), + DataHubGraphQLErrorCode.BAD_REQUEST); + } - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion 
source with urn %s", ingestionSourceUrn), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = - response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - // Build the arguments map. - final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); - execSignal.setSignal(KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. - execSignal.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execSignal.setCreatedAt(new AuditStamp() - .setTime(System.currentTimeMillis()) - .setActor(Urn.createFromString(context.getActorUrn())) - ); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn( - input.getExecutionRequestUrn()), EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, execSignal); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to submit cancel signal %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Build the arguments map. + final ExecutionRequestSignal execSignal = new ExecutionRequestSignal(); + execSignal.setSignal( + KILL_EXECUTION_REQUEST_SIGNAL); // Requests a kill of the running task. 
+ execSignal.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execSignal.setCreatedAt( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(input.getExecutionRequestUrn()), + EXECUTION_REQUEST_SIGNAL_ASPECT_NAME, + execSignal); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to submit cancel signal %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java index ea20b837e0a1f6..8ef5447cd94335 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; @@ -30,15 +34,9 @@ import org.json.JSONException; import org.json.JSONObject; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ -public class CreateIngestionExecutionRequestResolver implements DataFetcher> { +/** Creates an on-demand ingestion execution request. */ +public class CreateIngestionExecutionRequestResolver + implements DataFetcher> { private static final String RUN_INGEST_TASK_NAME = "RUN_INGEST"; private static final String MANUAL_EXECUTION_SOURCE_NAME = "MANUAL_INGESTION_SOURCE"; @@ -49,7 +47,8 @@ public class CreateIngestionExecutionRequestResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final CreateIngestionExecutionRequestInput input = - bindArgument(environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - // Fetch the original ingestion source - final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); - final Map response = - _entityClient.batchGetV2(INGESTION_SOURCE_ENTITY_NAME, ImmutableSet.of(ingestionSourceUrn), - ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), context.getAuthentication()); - - if (!response.containsKey(ingestionSourceUrn)) { - throw new DataHubGraphQLException( - String.format("Failed to find ingestion source with urn %s", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - final EnvelopedAspect envelopedInfo = response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); - final 
DataHubIngestionSourceInfo ingestionSourceInfo = new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); - - if (!ingestionSourceInfo.getConfig().hasRecipe()) { - throw new DataHubGraphQLException( - String.format("Failed to find valid ingestion source with urn %s. Missing recipe", ingestionSourceUrn.toString()), - DataHubGraphQLErrorCode.BAD_REQUEST); - } - - // Build the arguments map. - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task - execInput.setSource( - new ExecutionRequestSource().setType(MANUAL_EXECUTION_SOURCE_NAME).setIngestionSource(ingestionSourceUrn)); - execInput.setExecutorId(ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map arguments = new HashMap<>(); - String recipe = ingestionSourceInfo.getConfig().getRecipe(); - recipe = injectRunId(recipe, executionRequestUrn.toString()); - recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); - arguments.put(RECIPE_ARG_NAME, recipe); - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().hasVersion() - ? ingestionSourceInfo.getConfig().getVersion() - : _ingestionConfiguration.getDefaultCliVersion() - ); - if (ingestionSourceInfo.getConfig().hasVersion()) { - arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); - } - String debugMode = "false"; - if (ingestionSourceInfo.getConfig().hasDebugMode()) { - debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? 
"true" : "false"; - } - if (ingestionSourceInfo.getConfig().hasExtraArgs()) { - arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final CreateIngestionExecutionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateIngestionExecutionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + // Fetch the original ingestion source + final Urn ingestionSourceUrn = Urn.createFromString(input.getIngestionSourceUrn()); + final Map response = + _entityClient.batchGetV2( + INGESTION_SOURCE_ENTITY_NAME, + ImmutableSet.of(ingestionSourceUrn), + ImmutableSet.of(INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + + if (!response.containsKey(ingestionSourceUrn)) { + throw new DataHubGraphQLException( + String.format( + "Failed to find ingestion source with urn %s", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + final EnvelopedAspect envelopedInfo = + response.get(ingestionSourceUrn).getAspects().get(INGESTION_INFO_ASPECT_NAME); + final DataHubIngestionSourceInfo ingestionSourceInfo = + new DataHubIngestionSourceInfo(envelopedInfo.getValue().data()); + + if (!ingestionSourceInfo.getConfig().hasRecipe()) { + throw new DataHubGraphQLException( + String.format( + "Failed to find valid ingestion source with urn %s. Missing recipe", + ingestionSourceUrn.toString()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + + // Build the arguments map. 
+ final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(RUN_INGEST_TASK_NAME); // Set the RUN_INGEST task + execInput.setSource( + new ExecutionRequestSource() + .setType(MANUAL_EXECUTION_SOURCE_NAME) + .setIngestionSource(ingestionSourceUrn)); + execInput.setExecutorId( + ingestionSourceInfo.getConfig().getExecutorId(), SetMode.IGNORE_NULL); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map arguments = new HashMap<>(); + String recipe = ingestionSourceInfo.getConfig().getRecipe(); + recipe = injectRunId(recipe, executionRequestUrn.toString()); + recipe = IngestionUtils.injectPipelineName(recipe, ingestionSourceUrn.toString()); + arguments.put(RECIPE_ARG_NAME, recipe); + arguments.put( + VERSION_ARG_NAME, + ingestionSourceInfo.getConfig().hasVersion() + ? ingestionSourceInfo.getConfig().getVersion() + : _ingestionConfiguration.getDefaultCliVersion()); + if (ingestionSourceInfo.getConfig().hasVersion()) { + arguments.put(VERSION_ARG_NAME, ingestionSourceInfo.getConfig().getVersion()); + } + String debugMode = "false"; + if (ingestionSourceInfo.getConfig().hasDebugMode()) { + debugMode = ingestionSourceInfo.getConfig().isDebugMode() ? 
"true" : "false"; + } + if (ingestionSourceInfo.getConfig().hasExtraArgs()) { + arguments.putAll(ingestionSourceInfo.getConfig().getExtraArgs()); + } + arguments.put(DEBUG_MODE_ARG_NAME, debugMode); + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new ingestion execution request %s", input), e); + } } - arguments.put(DEBUG_MODE_ARG_NAME, debugMode); - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, - EXECUTION_REQUEST_ENTITY_NAME, EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new ingestion execution request %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } /** - * Injects an override run id into a recipe for tracking purposes. Any existing run id will be overwritten. + * Injects an override run id into a recipe for tracking purposes. Any existing run id will be + * overwritten. * - * TODO: Determine if this should be handled in the executor itself. + *

TODO: Determine if this should be handled in the executor itself. * * @param runId the run id to place into the recipe * @return a modified recipe JSON string @@ -149,7 +170,8 @@ private String injectRunId(final String originalJson, final String runId) { return obj.toString(); } catch (JSONException e) { // This should ideally never be hit. - throw new IllegalArgumentException("Failed to create execution request: Invalid recipe json provided."); + throw new IllegalArgumentException( + "Failed to create execution request: Invalid recipe json provided."); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java index 1886db62ae4502..2505ce28c5c2ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolver.java @@ -1,6 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; -import com.linkedin.metadata.config.IngestionConfiguration; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -10,26 +13,19 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.execution.ExecutionRequestInput; import com.linkedin.execution.ExecutionRequestSource; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.metadata.key.ExecutionRequestKey; import com.linkedin.metadata.utils.EntityKeyUtils; import 
com.linkedin.metadata.utils.IngestionUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates an on-demand ingestion execution request. - */ +/** Creates an on-demand ingestion execution request. */ public class CreateTestConnectionRequestResolver implements DataFetcher> { private static final String TEST_CONNECTION_TASK_NAME = "TEST_CONNECTION"; @@ -41,7 +37,8 @@ public class CreateTestConnectionRequestResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - final CreateTestConnectionRequestInput input = - bindArgument(environment.getArgument("input"), CreateTestConnectionRequestInput.class); - - try { - final ExecutionRequestKey key = new ExecutionRequestKey(); - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - key.setId(uuidStr); - final Urn executionRequestUrn = EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); - - final ExecutionRequestInput execInput = new ExecutionRequestInput(); - execInput.setTask(TEST_CONNECTION_TASK_NAME); - execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); - execInput.setExecutorId(DEFAULT_EXECUTOR_ID); - execInput.setRequestedAt(System.currentTimeMillis()); - - Map arguments = new HashMap<>(); - arguments.put(RECIPE_ARG_NAME, IngestionUtils.injectPipelineName(input.getRecipe(), executionRequestUrn.toString())); - if (input.getVersion() != null) { - arguments.put(VERSION_ARG_NAME, input.getVersion()); - } - execInput.setArgs(new StringMap(arguments)); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, EXECUTION_REQUEST_ENTITY_NAME, - EXECUTION_REQUEST_INPUT_ASPECT_NAME, execInput); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new test ingestion connection request %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + final CreateTestConnectionRequestInput input = + bindArgument( + environment.getArgument("input"), CreateTestConnectionRequestInput.class); + + try { + final ExecutionRequestKey key = new ExecutionRequestKey(); + final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + key.setId(uuidStr); + final Urn executionRequestUrn = + EntityKeyUtils.convertEntityKeyToUrn(key, EXECUTION_REQUEST_ENTITY_NAME); + + final ExecutionRequestInput execInput = new ExecutionRequestInput(); + execInput.setTask(TEST_CONNECTION_TASK_NAME); + execInput.setSource(new ExecutionRequestSource().setType(TEST_CONNECTION_SOURCE_NAME)); + execInput.setExecutorId(DEFAULT_EXECUTOR_ID); + execInput.setRequestedAt(System.currentTimeMillis()); + + Map arguments = new HashMap<>(); + arguments.put( + RECIPE_ARG_NAME, + IngestionUtils.injectPipelineName( + input.getRecipe(), executionRequestUrn.toString())); + if (input.getVersion() != null) { + arguments.put(VERSION_ARG_NAME, input.getVersion()); + } + execInput.setArgs(new StringMap(arguments)); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, + EXECUTION_REQUEST_ENTITY_NAME, + EXECUTION_REQUEST_INPUT_ASPECT_NAME, + execInput); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to create new test ingestion connection request %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java index 8880330d634953..722ffe3aba6b84 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java 
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolver.java @@ -19,12 +19,10 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Retrieves an Ingestion Execution Request by primary key (urn). - */ +/** Retrieves an Ingestion Execution Request by primary key (urn). */ @Slf4j -public class GetIngestionExecutionRequestResolver implements DataFetcher> { +public class GetIngestionExecutionRequestResolver + implements DataFetcher> { private final EntityClient _entityClient; @@ -33,32 +31,40 @@ public GetIngestionExecutionRequestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch specific execution request - final Urn urn = Urn.createFromString(urnStr); - final Map entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No execution request found - throw new DataHubGraphQLException(String.format("Failed to find Execution Request with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Execution request found - return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve execution request", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch specific execution request + 
final Urn urn = Urn.createFromString(urnStr); + final Map entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No execution request found + throw new DataHubGraphQLException( + String.format("Failed to find Execution Request with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Execution request found + return IngestionResolverUtils.mapExecutionRequest(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve execution request", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java index c72f273a9027e2..01100a24d6b15c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolver.java @@ -29,11 +29,10 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -/** - * Retrieves a list of historical executions for a particular source. - */ +/** Retrieves a list of historical executions for a particular source. 
*/ @Slf4j -public class IngestionSourceExecutionRequestsResolver implements DataFetcher> { +public class IngestionSourceExecutionRequestsResolver + implements DataFetcher> { private static final String INGESTION_SOURCE_FIELD_NAME = "ingestionSource"; private static final String REQUEST_TIME_MS_FIELD_NAME = "requestTimeMs"; @@ -45,64 +44,77 @@ public IngestionSourceExecutionRequestsResolver(final EntityClient entityClient) } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final String urn = ((IngestionSource) environment.getSource()).getUrn(); - final Integer start = environment.getArgument("start") != null ? environment.getArgument("start") : 0; - final Integer count = environment.getArgument("count") != null ? environment.getArgument("count") : 10; + final Integer start = + environment.getArgument("start") != null ? environment.getArgument("start") : 0; + final Integer count = + environment.getArgument("count") != null ? environment.getArgument("count") : 10; - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + try { - try { + // 1. Fetch the related edges + final Criterion filterCriterion = + new Criterion() + .setField(INGESTION_SOURCE_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(urn); - // 1. 
Fetch the related edges - final Criterion filterCriterion = new Criterion() - .setField(INGESTION_SOURCE_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(urn); + final SearchResult executionsSearchResult = + _entityClient.filter( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion))))), + new SortCriterion() + .setField(REQUEST_TIME_MS_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication()); - final SearchResult executionsSearchResult = _entityClient.filter( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )), - new SortCriterion().setField(REQUEST_TIME_MS_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication() - ); + // 2. Batch fetch the related ExecutionRequests + final Set relatedExecRequests = + executionsSearchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); - // 2. Batch fetch the related ExecutionRequests - final Set relatedExecRequests = executionsSearchResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Map entities = + _entityClient.batchGetV2( + Constants.EXECUTION_REQUEST_ENTITY_NAME, + relatedExecRequests, + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), + context.getAuthentication()); - final Map entities = _entityClient.batchGetV2( - Constants.EXECUTION_REQUEST_ENTITY_NAME, - relatedExecRequests, - ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME), - context.getAuthentication()); - - // 3. 
Map the GMS ExecutionRequests into GraphQL Execution Requests - final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); - result.setStart(executionsSearchResult.getFrom()); - result.setCount(executionsSearchResult.getPageSize()); - result.setTotal(executionsSearchResult.getNumEntities()); - result.setExecutionRequests(IngestionResolverUtils.mapExecutionRequests( - executionsSearchResult.getEntities() - .stream() - .map(searchResult -> entities.get(searchResult.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()) - )); - return result; - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to resolve executions associated with ingestion source with urn %s", urn), e); - } - }); + // 3. Map the GMS ExecutionRequests into GraphQL Execution Requests + final IngestionSourceExecutionRequests result = new IngestionSourceExecutionRequests(); + result.setStart(executionsSearchResult.getFrom()); + result.setCount(executionsSearchResult.getPageSize()); + result.setTotal(executionsSearchResult.getNumEntities()); + result.setExecutionRequests( + IngestionResolverUtils.mapExecutionRequests( + executionsSearchResult.getEntities().stream() + .map(searchResult -> entities.get(searchResult.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to resolve executions associated with ingestion source with urn %s", + urn), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java index 05fcacf7c09460..0b909dee513747 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -7,11 +9,8 @@ import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - public class RollbackIngestionResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -20,33 +19,36 @@ public RollbackIngestionResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (!IngestionAuthUtils.canManageIngestion(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!IngestionAuthUtils.canManageIngestion(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } - final RollbackIngestionInput input = bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); - final String runId = input.getRunId(); + final RollbackIngestionInput input = + bindArgument(environment.getArgument("input"), RollbackIngestionInput.class); + final String runId = input.getRunId(); - rollbackIngestion(runId, context); - return true; - }); + rollbackIngestion(runId, context); + return true; + }); } - public CompletableFuture rollbackIngestion(final String runId, final QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.rollbackIngestion(runId, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to rollback ingestion execution", e); - } - }); - + public CompletableFuture rollbackIngestion( + final String runId, final QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.rollbackIngestion(runId, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to rollback ingestion execution", e); + } + }); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java index e1745031d9daed..577780e53ce862 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + 
import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -17,23 +21,16 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the MANAGE_SECRETS privilege. + * Creates an encrypted DataHub secret. Uses AES symmetric encryption / decryption. Requires the + * MANAGE_SECRETS privilege. */ public class CreateSecretResolver implements DataFetcher> { private final EntityClient _entityClient; private final SecretService _secretService; - public CreateSecretResolver( - final EntityClient entityClient, - final SecretService secretService - ) { + public CreateSecretResolver(final EntityClient entityClient, final SecretService secretService) { _entityClient = entityClient; _secretService = secretService; } @@ -41,36 +38,46 @@ public CreateSecretResolver( @Override public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateSecretInput input = bindArgument(environment.getArgument("input"), CreateSecretInput.class); + final CreateSecretInput input = + bindArgument(environment.getArgument("input"), CreateSecretInput.class); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageSecrets(context)) { - if (IngestionAuthUtils.canManageSecrets(context)) { + try { + // Create the Ingestion source key --> use the display name as a unique id to ensure + // it's not duplicated. 
+ final DataHubSecretKey key = new DataHubSecretKey(); + key.setId(input.getName()); - try { - // Create the Ingestion source key --> use the display name as a unique id to ensure it's not duplicated. - final DataHubSecretKey key = new DataHubSecretKey(); - key.setId(input.getName()); + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Secret already exists!"); + } - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, SECRETS_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Secret already exists!"); - } - - // Create the secret value. - final DataHubSecretValue value = new DataHubSecretValue(); - value.setName(input.getName()); - value.setValue(_secretService.encrypt(input.getValue())); - value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn(context.getActorUrn())).setTime(System.currentTimeMillis())); + // Create the secret value. + final DataHubSecretValue value = new DataHubSecretValue(); + value.setName(input.getName()); + value.setValue(_secretService.encrypt(input.getValue())); + value.setDescription(input.getDescription(), SetMode.IGNORE_NULL); + value.setCreated( + new AuditStamp() + .setActor(UrnUtils.getUrn(context.getActorUrn())) + .setTime(System.currentTimeMillis())); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, SECRETS_ENTITY_NAME, - SECRET_VALUE_ASPECT_NAME, value); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create new secret with name %s", input.getName()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, SECRETS_ENTITY_NAME, SECRET_VALUE_ASPECT_NAME, value); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create new secret with name %s", input.getName()), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java index b35931420c0787..228d5a094cdefa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. - */ +/** Hard deletes a particular DataHub secret. Requires the MANAGE_SECRETS privilege. 
*/ public class DeleteSecretResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -27,15 +24,19 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageSecrets(context)) { final String secretUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(secretUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return secretUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against secret with urn %s", secretUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return secretUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against secret with urn %s", secretUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java index 85c6c6754470db..67564aa721bdac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -23,11 +25,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / decryption. - * Requires the MANAGE_SECRETS privilege. + * Retrieves the plaintext values of secrets stored in DataHub. Uses AES symmetric encryption / + * decryption. Requires the MANAGE_SECRETS privilege. 
*/ public class GetSecretValuesResolver implements DataFetcher>> { @@ -35,60 +35,67 @@ public class GetSecretValuesResolver implements DataFetcher> get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture> get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final GetSecretValuesInput input = bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); + final GetSecretValuesInput input = + bindArgument(environment.getArgument("input"), GetSecretValuesInput.class); - return CompletableFuture.supplyAsync(() -> { - try { - // Fetch secrets - final Set urns = input.getSecrets() - .stream() - .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) - .collect(Collectors.toSet()); + return CompletableFuture.supplyAsync( + () -> { + try { + // Fetch secrets + final Set urns = + input.getSecrets().stream() + .map(urnStr -> Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, urnStr)) + .collect(Collectors.toSet()); - final Map entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(urns), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>(urns), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); - // Now for each secret, decrypt and return the value. If no secret was found, then we will simply omit it from the list. - // There is no ordering guarantee for the list. - return entities.values() - .stream() - .map(entity -> { - EnvelopedAspect aspect = entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); - if (aspect != null) { - // Aspect is present. - final DataHubSecretValue secretValue = new DataHubSecretValue(aspect.getValue().data()); - // Now decrypt the encrypted secret. 
- final String decryptedSecretValue = decryptSecret(secretValue.getValue()); - return new SecretValue(secretValue.getName(), decryptedSecretValue); - } else { - // No secret exists - return null; - } - }) - .filter(Objects::nonNull) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + // Now for each secret, decrypt and return the value. If no secret was found, then we + // will simply omit it from the list. + // There is no ordering guarantee for the list. + return entities.values().stream() + .map( + entity -> { + EnvelopedAspect aspect = + entity.getAspects().get(Constants.SECRET_VALUE_ASPECT_NAME); + if (aspect != null) { + // Aspect is present. + final DataHubSecretValue secretValue = + new DataHubSecretValue(aspect.getValue().data()); + // Now decrypt the encrypted secret. + final String decryptedSecretValue = decryptSecret(secretValue.getValue()); + return new SecretValue(secretValue.getName(), decryptedSecretValue); + } else { + // No secret exists + return null; + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private String decryptSecret(final String encryptedSecret) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java index b0d8c9fd34303c..eb054295af09b3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; @@ -31,13 +34,7 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. - */ +/** Lists all secrets present within DataHub. Requires the MANAGE_SECRETS privilege. 
*/ @Slf4j public class ListSecretsResolver implements DataFetcher> { @@ -52,55 +49,66 @@ public ListSecretsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageSecrets(context)) { - final ListSecretsInput input = bindArgument(environment.getArgument("input"), ListSecretsInput.class); + final ListSecretsInput input = + bindArgument(environment.getArgument("input"), ListSecretsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all secrets - final SearchResult gmsResult = _entityClient.search( - Constants.SECRETS_ENTITY_NAME, - query, - null, - new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - // Then, resolve all secrets - final Map entities = _entityClient.batchGetV2( - Constants.SECRETS_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), - context.getAuthentication()); - - // Now that we have entities we can bind this to a result. 
- final ListSecretsResult result = new ListSecretsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setSecrets(mapEntities(gmsResult.getEntities().stream() - .map(entity -> entities.get(entity.getEntity())) - .filter(Objects::nonNull) - .collect(Collectors.toList()))); - return result; - - } catch (Exception e) { - throw new RuntimeException("Failed to list secrets", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all secrets + final SearchResult gmsResult = + _entityClient.search( + Constants.SECRETS_ENTITY_NAME, + query, + null, + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + + // Then, resolve all secrets + final Map entities = + _entityClient.batchGetV2( + Constants.SECRETS_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME), + context.getAuthentication()); + + // Now that we have entities we can bind this to a result. + final ListSecretsResult result = new ListSecretsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setSecrets( + mapEntities( + gmsResult.getEntities().stream() + .map(entity -> entities.get(entity.getEntity())) + .filter(Objects::nonNull) + .collect(Collectors.toList()))); + return result; + + } catch (Exception e) { + throw new RuntimeException("Failed to list secrets", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private List mapEntities(final List entities) { @@ -113,7 +121,8 @@ private List mapEntities(final List entities) { final EnvelopedAspect envelopedInfo = aspects.get(Constants.SECRET_VALUE_ASPECT_NAME); // Bind into a strongly typed object. - final DataHubSecretValue secretValue = new DataHubSecretValue(envelopedInfo.getValue().data()); + final DataHubSecretValue secretValue = + new DataHubSecretValue(envelopedInfo.getValue().data()); // Map using the strongly typed object. results.add(mapSecretValue(entityUrn, secretValue)); @@ -128,4 +137,4 @@ private Secret mapSecretValue(final Urn urn, final DataHubSecretValue value) { result.setDescription(value.getDescription(GetMode.NULL)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java index e510a9fff80aa4..225a5801adec94 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/SecretUtils.java @@ -8,10 +8,7 @@ import javax.crypto.Cipher; import javax.crypto.spec.SecretKeySpec; - -/** - * Utility methods to encrypt and decrypt DataHub secrets. - */ +/** Utility methods to encrypt and decrypt DataHub secrets. 
*/ public class SecretUtils { static String encrypt(String value, String secret) { @@ -30,7 +27,8 @@ static String encrypt(String value, String secret) { } Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding"); cipher.init(Cipher.ENCRYPT_MODE, secretKey); - return Base64.getEncoder().encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); + return Base64.getEncoder() + .encodeToString(cipher.doFinal(value.getBytes(StandardCharsets.UTF_8))); } catch (Exception e) { throw new RuntimeException("Failed to encrypt value using provided secret!"); } @@ -59,6 +57,5 @@ static String decrypt(String encryptedValue, String secret) { return null; } - private SecretUtils() { - } + private SecretUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java index 38050331318ca8..0666fab52dd4ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolver.java @@ -9,10 +9,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - /** - * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires MANAGE_INGESTION - * privilege. + * Resolver responsible for hard deleting a particular DataHub Ingestion Source. Requires + * MANAGE_INGESTION privilege. 
*/ public class DeleteIngestionSourceResolver implements DataFetcher> { @@ -28,15 +27,21 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (IngestionAuthUtils.canManageIngestion(context)) { final String ingestionSourceUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(ingestionSourceUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return ingestionSourceUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against ingestion source with urn %s", ingestionSourceUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return ingestionSourceUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform delete against ingestion source with urn %s", + ingestionSourceUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java index 562d06b79d2c79..3b6790212ba235 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolver.java @@ -19,9 +19,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -/** - * Gets a particular Ingestion Source by urn. - */ +/** Gets a particular Ingestion Source by urn. 
*/ @Slf4j public class GetIngestionSourceResolver implements DataFetcher> { @@ -32,31 +30,37 @@ public GetIngestionSourceResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { final String urnStr = environment.getArgument("urn"); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = Urn.createFromString(urnStr); - final Map entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(ImmutableSet.of(urn)), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), - context.getAuthentication()); - if (!entities.containsKey(urn)) { - // No ingestion source found - throw new DataHubGraphQLException(String.format("Failed to find Ingestion Source with urn %s", urn), DataHubGraphQLErrorCode.NOT_FOUND); - } - // Ingestion source found - return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve ingestion source", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = Urn.createFromString(urnStr); + final Map entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>(ImmutableSet.of(urn)), + ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME), + context.getAuthentication()); + if (!entities.containsKey(urn)) { + // No ingestion source found + throw new DataHubGraphQLException( + String.format("Failed to find Ingestion Source with urn %s", urn), + DataHubGraphQLErrorCode.NOT_FOUND); + } + // Ingestion source found + return IngestionResolverUtils.mapIngestionSource(entities.get(urn)); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve ingestion 
source", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java index d019473606e58e..51c9e30aadcce1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourcesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -26,12 +28,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. - */ -public class ListIngestionSourcesResolver implements DataFetcher> { +/** Lists all ingestion sources stored within DataHub. Requires the MANAGE_INGESTION privilege. 
*/ +public class ListIngestionSourcesResolver + implements DataFetcher> { private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,57 +43,74 @@ public ListIngestionSourcesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); if (IngestionAuthUtils.canManageIngestion(context)) { - final ListIngestionSourcesInput input = bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); + final ListIngestionSourcesInput input = + bindArgument(environment.getArgument("input"), ListIngestionSourcesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); + final List filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all ingestion sources Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.INGESTION_SOURCE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - null, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all ingestion sources Urns. 
+ final SearchResult gmsResult = + _entityClient.search( + Constants.INGESTION_SOURCE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + null, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, resolve all ingestion sources - final Map entities = _entityClient.batchGetV2( - Constants.INGESTION_SOURCE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList())), - ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), - context.getAuthentication()); + // Then, resolve all ingestion sources + final Map entities = + _entityClient.batchGetV2( + Constants.INGESTION_SOURCE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME), + context.getAuthentication()); - final Collection sortedEntities = entities.values() - .stream() - .sorted(Comparator.comparingLong(s -> -s.getAspects().get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME).getCreated().getTime())) - .collect(Collectors.toList()); + final Collection sortedEntities = + entities.values().stream() + .sorted( + Comparator.comparingLong( + s -> + -s.getAspects() + .get(Constants.INGESTION_SOURCE_KEY_ASPECT_NAME) + .getCreated() + .getTime())) + .collect(Collectors.toList()); - // Now that we have entities we can bind this to a result. - final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setIngestionSources(IngestionResolverUtils.mapIngestionSources(sortedEntities)); - return result; + // Now that we have entities we can bind this to a result. 
+ final ListIngestionSourcesResult result = new ListIngestionSourcesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setIngestionSources( + IngestionResolverUtils.mapIngestionSources(sortedEntities)); + return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list ingestion sources", e); - } - }); + } catch (Exception e) { + throw new RuntimeException("Failed to list ingestion sources", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java index 68e334bd976f8e..6194452e4b6fe5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; @@ -19,23 +23,15 @@ import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import java.util.Map; -import java.util.stream.Collectors; -import 
lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; +import java.util.Map; import java.util.Optional; import java.util.UUID; import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. - */ +/** Creates or updates an ingestion source. Requires the MANAGE_INGESTION privilege. */ @Slf4j public class UpsertIngestionSourceResolver implements DataFetcher> { @@ -49,46 +45,60 @@ public UpsertIngestionSourceResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (IngestionAuthUtils.canManageIngestion(context)) { - - final Optional ingestionSourceUrn = Optional.ofNullable(environment.getArgument("urn")); - final UpdateIngestionSourceInput input = bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); - - // Create the policy info. 
- final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); - final MetadataChangeProposal proposal; - if (ingestionSourceUrn.isPresent()) { - // Update existing ingestion source - try { - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(ingestionSourceUrn.get()), INGESTION_INFO_ASPECT_NAME, info); - } catch (URISyntaxException e) { - throw new DataHubGraphQLException( - String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), - DataHubGraphQLErrorCode.BAD_REQUEST); + return CompletableFuture.supplyAsync( + () -> { + if (IngestionAuthUtils.canManageIngestion(context)) { + + final Optional ingestionSourceUrn = + Optional.ofNullable(environment.getArgument("urn")); + final UpdateIngestionSourceInput input = + bindArgument(environment.getArgument("input"), UpdateIngestionSourceInput.class); + + // Create the policy info. + final DataHubIngestionSourceInfo info = mapIngestionSourceInfo(input); + final MetadataChangeProposal proposal; + if (ingestionSourceUrn.isPresent()) { + // Update existing ingestion source + try { + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(ingestionSourceUrn.get()), + INGESTION_INFO_ASPECT_NAME, + info); + } catch (URISyntaxException e) { + throw new DataHubGraphQLException( + String.format("Malformed urn %s provided.", ingestionSourceUrn.get()), + DataHubGraphQLErrorCode.BAD_REQUEST); + } + } else { + // Create new ingestion source + // Since we are creating a new Ingestion Source, we need to generate a unique UUID. 
+ final UUID uuid = UUID.randomUUID(); + final String uuidStr = uuid.toString(); + final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); + key.setId(uuidStr); + proposal = + buildMetadataChangeProposalWithKey( + key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); + } + + try { + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to perform update against ingestion source with urn %s", + input.toString()), + e); + } } - } else { - // Create new ingestion source - // Since we are creating a new Ingestion Source, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = uuid.toString(); - final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); - key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, INGESTION_SOURCE_ENTITY_NAME, INGESTION_INFO_ASPECT_NAME, info); - } - - try { - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against ingestion source with urn %s", input.toString()), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } - private DataHubIngestionSourceInfo mapIngestionSourceInfo(final UpdateIngestionSourceInput input) { + private DataHubIngestionSourceInfo mapIngestionSourceInfo( + final UpdateIngestionSourceInput input) { final DataHubIngestionSourceInfo result = new DataHubIngestionSourceInfo(); result.setType(input.getType()); result.setName(input.getName()); @@ -113,15 +123,17 @@ private DataHubIngestionSourceConfig mapConfig(final UpdateIngestionSourceConfig result.setDebugMode(input.getDebugMode()); } if (input.getExtraArgs() != null) { - Map extraArgs = input.getExtraArgs() - .stream() - .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + Map extraArgs = + input.getExtraArgs().stream() + .collect( + Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); result.setExtraArgs(new StringMap(extraArgs)); } return result; } - private DataHubIngestionSourceSchedule mapSchedule(final UpdateIngestionSourceScheduleInput input) { + private DataHubIngestionSourceSchedule mapSchedule( + final UpdateIngestionSourceScheduleInput input) { final DataHubIngestionSourceSchedule result = new DataHubIngestionSourceSchedule(); result.setInterval(input.getInterval()); result.setTimezone(input.getTimezone()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java index ea61b5e258d8bb..06bad27e270620 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/DataJobRunsResolver.java @@ -32,10 +32,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -/** - * GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job - */ 
-public class DataJobRunsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching a list of Task Runs associated with a Data Job */ +public class DataJobRunsResolver + implements DataFetcher> { private static final String PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME = "parentTemplate"; private static final String CREATED_TIME_SEARCH_INDEX_FIELD_NAME = "created"; @@ -48,74 +47,76 @@ public DataJobRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final QueryContext context = environment.getContext(); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. + final Filter filter = buildTaskRunsEntityFilter(entityUrn); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. 
- final Filter filter = buildTaskRunsEntityFilter(entityUrn); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 2: Hydrate the incident entities - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 3: Map GMS incident model to GraphQL model + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 3: Map GMS incident model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); - - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new 
RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } private Filter buildTaskRunsEntityFilter(final String entityUrn) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(PARENT_TEMPLATE_URN_SEARCH_INDEX_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java index 3ecf396f808b38..d595b1e513d75e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/jobs/EntityRunsResolver.java @@ -33,11 +33,9 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -/** - * GraphQL Resolver used for fetching the list of task runs associated with a Dataset. 
- */ -public class EntityRunsResolver implements DataFetcher> { +/** GraphQL Resolver used for fetching the list of task runs associated with a Dataset. */ +public class EntityRunsResolver + implements DataFetcher> { private static final String INPUT_FIELD_NAME = "inputs.keyword"; private static final String OUTPUT_FIELD_NAME = "outputs.keyword"; @@ -51,76 +49,84 @@ public EntityRunsResolver(final EntityClient entityClient) { @Override public CompletableFuture get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); - final String entityUrn = ((Entity) environment.getSource()).getUrn(); - final Integer start = environment.getArgumentOrDefault("start", 0); - final Integer count = environment.getArgumentOrDefault("count", 20); - final RelationshipDirection direction = RelationshipDirection.valueOf(environment.getArgumentOrDefault("direction", - RelationshipDirection.INCOMING.toString())); + final String entityUrn = ((Entity) environment.getSource()).getUrn(); + final Integer start = environment.getArgumentOrDefault("start", 0); + final Integer count = environment.getArgumentOrDefault("count", 20); + final RelationshipDirection direction = + RelationshipDirection.valueOf( + environment.getArgumentOrDefault( + "direction", RelationshipDirection.INCOMING.toString())); - try { - // Step 1: Fetch set of task runs associated with the target entity from the Search Index! - // We use the search index so that we can easily sort by the last updated time. 
- final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); - final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); - final SearchResult gmsResult = _entityClient.filter( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - filter, - sortCriterion, - start, - count, - context.getAuthentication()); - final List dataProcessInstanceUrns = gmsResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()); + try { + // Step 1: Fetch set of task runs associated with the target entity from the Search + // Index! + // We use the search index so that we can easily sort by the last updated time. + final Filter filter = buildTaskRunsEntityFilter(entityUrn, direction); + final SortCriterion sortCriterion = buildTaskRunsSortCriterion(); + final SearchResult gmsResult = + _entityClient.filter( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + filter, + sortCriterion, + start, + count, + context.getAuthentication()); + final List dataProcessInstanceUrns = + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()); - // Step 2: Hydrate the incident entities - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, - new HashSet<>(dataProcessInstanceUrns), - null, - context.getAuthentication()); + // Step 2: Hydrate the incident entities + final Map entities = + _entityClient.batchGetV2( + Constants.DATA_PROCESS_INSTANCE_ENTITY_NAME, + new HashSet<>(dataProcessInstanceUrns), + null, + context.getAuthentication()); - // Step 3: Map GMS instance model to GraphQL model - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataProcessInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - final List dataProcessInstances = gmsResults.stream() - .filter(Objects::nonNull) - .map(DataProcessInstanceMapper::map) - .collect(Collectors.toList()); + // Step 3: Map GMS instance model to GraphQL model + final List gmsResults = new 
ArrayList<>(); + for (Urn urn : dataProcessInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + final List dataProcessInstances = + gmsResults.stream() + .filter(Objects::nonNull) + .map(DataProcessInstanceMapper::map) + .collect(Collectors.toList()); - // Step 4: Package and return result - final DataProcessInstanceResult result = new DataProcessInstanceResult(); - result.setCount(gmsResult.getPageSize()); - result.setStart(gmsResult.getFrom()); - result.setTotal(gmsResult.getNumEntities()); - result.setRuns(dataProcessInstances); - return result; - } catch (URISyntaxException | RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve incidents from GMS", e); - } - }); + // Step 4: Package and return result + final DataProcessInstanceResult result = new DataProcessInstanceResult(); + result.setCount(gmsResult.getPageSize()); + result.setStart(gmsResult.getFrom()); + result.setTotal(gmsResult.getNumEntities()); + result.setRuns(dataProcessInstances); + return result; + } catch (URISyntaxException | RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve incidents from GMS", e); + } + }); } - private Filter buildTaskRunsEntityFilter(final String entityUrn, final RelationshipDirection direction) { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField(direction.equals(RelationshipDirection.INCOMING) ? INPUT_FIELD_NAME : OUTPUT_FIELD_NAME) - .setCondition(Condition.EQUAL) - .setValue(entityUrn) - )); + private Filter buildTaskRunsEntityFilter( + final String entityUrn, final RelationshipDirection direction) { + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField( + direction.equals(RelationshipDirection.INCOMING) + ? 
INPUT_FIELD_NAME + : OUTPUT_FIELD_NAME) + .setCondition(Condition.EQUAL) + .setValue(entityUrn))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java index 8fc3a609006626..a0caef20a4755e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/lineage/UpdateLineageResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.lineage; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -16,10 +18,6 @@ import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -29,8 +27,9 @@ import java.util.Set; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor @@ -43,11 +42,13 @@ public class UpdateLineageResolver implements DataFetcher 
get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - final UpdateLineageInput input = bindArgument(environment.getArgument("input"), UpdateLineageInput.class); + final UpdateLineageInput input = + bindArgument(environment.getArgument("input"), UpdateLineageInput.class); final List edgesToAdd = input.getEdgesToAdd(); final List edgesToRemove = input.getEdgesToRemove(); - // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage for each entity + // loop over edgesToAdd and edgesToRemove and ensure the actor has privileges to edit lineage + // for each entity checkPrivileges(context, edgesToAdd, edgesToRemove); // organize data to make updating lineage cleaner @@ -57,77 +58,118 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw downstreamUrns.addAll(downstreamToUpstreamsToAdd.keySet()); downstreamUrns.addAll(downstreamToUpstreamsToRemove.keySet()); - return CompletableFuture.supplyAsync(() -> { - // build MCP for every downstreamUrn - for (Urn downstreamUrn : downstreamUrns) { - if (!_entityService.exists(downstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); - } - - final List upstreamUrnsToAdd = downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); - final List upstreamUrnsToRemove = downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); - try { - switch (downstreamUrn.getEntityType()) { - case Constants.DATASET_ENTITY_NAME: - // need to filter out dataJobs since this is a valid lineage edge, but will be handled in the downstream direction for DataJobInputOutputs - final List filteredUpstreamUrnsToAdd = filterOutDataJobUrns(upstreamUrnsToAdd); - final List filteredUpstreamUrnsToRemove = filterOutDataJobUrns(upstreamUrnsToRemove); - - 
_lineageService.updateDatasetLineage(downstreamUrn, filteredUpstreamUrnsToAdd, filteredUpstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.CHART_ENTITY_NAME: - _lineageService.updateChartLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DASHBOARD_ENTITY_NAME: - _lineageService.updateDashboardLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - case Constants.DATA_JOB_ENTITY_NAME: - _lineageService.updateDataJobUpstreamLineage(downstreamUrn, upstreamUrnsToAdd, upstreamUrnsToRemove, actor, context.getAuthentication()); - break; - default: + return CompletableFuture.supplyAsync( + () -> { + // build MCP for every downstreamUrn + for (Urn downstreamUrn : downstreamUrns) { + if (!_entityService.exists(downstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", downstreamUrn)); + } + + final List upstreamUrnsToAdd = + downstreamToUpstreamsToAdd.getOrDefault(downstreamUrn, new ArrayList<>()); + final List upstreamUrnsToRemove = + downstreamToUpstreamsToRemove.getOrDefault(downstreamUrn, new ArrayList<>()); + try { + switch (downstreamUrn.getEntityType()) { + case Constants.DATASET_ENTITY_NAME: + // need to filter out dataJobs since this is a valid lineage edge, but will be + // handled in the downstream direction for DataJobInputOutputs + final List filteredUpstreamUrnsToAdd = + filterOutDataJobUrns(upstreamUrnsToAdd); + final List filteredUpstreamUrnsToRemove = + filterOutDataJobUrns(upstreamUrnsToRemove); + + _lineageService.updateDatasetLineage( + downstreamUrn, + filteredUpstreamUrnsToAdd, + filteredUpstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.CHART_ENTITY_NAME: + _lineageService.updateChartLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + 
context.getAuthentication()); + break; + case Constants.DASHBOARD_ENTITY_NAME: + _lineageService.updateDashboardLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + case Constants.DATA_JOB_ENTITY_NAME: + _lineageService.updateDataJobUpstreamLineage( + downstreamUrn, + upstreamUrnsToAdd, + upstreamUrnsToRemove, + actor, + context.getAuthentication()); + break; + default: + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", downstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", downstreamUrn), e); - } - } - - Map> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); - Map> upstreamToDownstreamsToRemove = getUpstreamToDownstreamMap(edgesToRemove); - Set upstreamUrns = new HashSet<>(); - upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); - upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); - - // build MCP for upstreamUrn if necessary - for (Urn upstreamUrn : upstreamUrns) { - if (!_entityService.exists(upstreamUrn)) { - throw new IllegalArgumentException(String.format("Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); - } - - final List downstreamUrnsToAdd = upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); - final List downstreamUrnsToRemove = upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); - try { - if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { - // need to filter out dataJobs since this is a valid lineage edge, but is handled in the upstream direction for DataJobs - final List filteredDownstreamUrnsToAdd = filterOutDataJobUrns(downstreamUrnsToAdd); - final List filteredDownstreamUrnsToRemove = filterOutDataJobUrns(downstreamUrnsToRemove); - - _lineageService.updateDataJobDownstreamLineage( - upstreamUrn, 
filteredDownstreamUrnsToAdd, filteredDownstreamUrnsToRemove, actor, context.getAuthentication() - ); + + Map> upstreamToDownstreamsToAdd = getUpstreamToDownstreamMap(edgesToAdd); + Map> upstreamToDownstreamsToRemove = + getUpstreamToDownstreamMap(edgesToRemove); + Set upstreamUrns = new HashSet<>(); + upstreamUrns.addAll(upstreamToDownstreamsToAdd.keySet()); + upstreamUrns.addAll(upstreamToDownstreamsToRemove.keySet()); + + // build MCP for upstreamUrn if necessary + for (Urn upstreamUrn : upstreamUrns) { + if (!_entityService.exists(upstreamUrn)) { + throw new IllegalArgumentException( + String.format( + "Cannot upsert lineage as downstream urn %s doesn't exist", upstreamUrn)); + } + + final List downstreamUrnsToAdd = + upstreamToDownstreamsToAdd.getOrDefault(upstreamUrn, new ArrayList<>()); + final List downstreamUrnsToRemove = + upstreamToDownstreamsToRemove.getOrDefault(upstreamUrn, new ArrayList<>()); + try { + if (upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) { + // need to filter out dataJobs since this is a valid lineage edge, but is handled in + // the upstream direction for DataJobs + final List filteredDownstreamUrnsToAdd = + filterOutDataJobUrns(downstreamUrnsToAdd); + final List filteredDownstreamUrnsToRemove = + filterOutDataJobUrns(downstreamUrnsToRemove); + + _lineageService.updateDataJobDownstreamLineage( + upstreamUrn, + filteredDownstreamUrnsToAdd, + filteredDownstreamUrnsToRemove, + actor, + context.getAuthentication()); + } + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update lineage for urn %s", upstreamUrn), e); + } } - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update lineage for urn %s", upstreamUrn), e); - } - } - return true; - }); + return true; + }); } private List filterOutDataJobUrns(@Nonnull final List urns) { - return urns.stream().filter( - upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME) - 
).collect(Collectors.toList()); + return urns.stream() + .filter(upstreamUrn -> !upstreamUrn.getEntityType().equals(Constants.DATA_JOB_ENTITY_NAME)) + .collect(Collectors.toList()); } private Map> getDownstreamToUpstreamsMap(@Nonnull final List edges) { @@ -156,7 +198,10 @@ private Map> getUpstreamToDownstreamMap(@Nonnull final List edgesToAdd, - @Nonnull final List edgesToRemove - ) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - DisjunctivePrivilegeGroup editLineagePrivileges = new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - new ConjunctivePrivilegeGroup(Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())) - )); + @Nonnull final List edgesToRemove) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + DisjunctivePrivilegeGroup editLineagePrivileges = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + allPrivilegesGroup, + new ConjunctivePrivilegeGroup( + Collections.singletonList(PoliciesConfig.EDIT_LINEAGE_PRIVILEGE.getType())))); for (LineageEdge edgeToAdd : edgesToAdd) { checkLineageEdgePrivileges(context, edgeToAdd, editLineagePrivileges); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java index 023686b1d10c99..7f031cb4818529 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/AspectResolver.java @@ -8,21 +8,19 @@ import java.util.concurrent.CompletableFuture; import org.dataloader.DataLoader; - /** * Generic GraphQL resolver responsible for * - * 1. Generating a single input AspectLoadKey. 
- * 2. Resolving a single {@link Aspect}. - * + *

1. Generating a single input AspectLoadKey. 2. Resolving a single {@link Aspect}. */ public class AspectResolver implements DataFetcher> { - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader("Aspect"); - final String fieldName = environment.getField().getName(); - final Long version = environment.getArgument("version"); - final String urn = ((Entity) environment.getSource()).getUrn(); - return loader.load(new VersionedAspectKey(urn, fieldName, version)); - } + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader("Aspect"); + final String fieldName = environment.getField().getName(); + final Long version = environment.getArgument("version"); + final String urn = ((Entity) environment.getSource()).getUrn(); + return loader.load(new VersionedAspectKey(urn, fieldName, version)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java index 20e0e4ae1c22a9..ecf36769dfa9f8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/BatchGetEntitiesResolver.java @@ -5,7 +5,6 @@ import com.linkedin.datahub.graphql.resolvers.BatchLoadUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -21,8 +20,7 @@ public class BatchGetEntitiesResolver implements DataFetcher> entityTypes, - final Function> entitiesProvider - ) { + final Function> entitiesProvider) { _entityTypes = entityTypes; _entitiesProvider = entitiesProvider; } @@ 
-32,22 +30,28 @@ public CompletableFuture> get(DataFetchingEnvironment environment) final List entities = _entitiesProvider.apply(environment); Map> entityTypeToEntities = new HashMap<>(); - entities.forEach((entity) -> { - EntityType type = entity.getType(); - List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); - entitiesList.add(entity); - entityTypeToEntities.put(type, entitiesList); - }); + entities.forEach( + (entity) -> { + EntityType type = entity.getType(); + List entitiesList = entityTypeToEntities.getOrDefault(type, new ArrayList<>()); + entitiesList.add(entity); + entityTypeToEntities.put(type, entitiesList); + }); List>> entitiesFutures = new ArrayList<>(); for (Map.Entry> entry : entityTypeToEntities.entrySet()) { - CompletableFuture> entitiesFuture = BatchLoadUtils - .batchLoadEntitiesOfSameType(entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); + CompletableFuture> entitiesFuture = + BatchLoadUtils.batchLoadEntitiesOfSameType( + entry.getValue(), _entityTypes, environment.getDataLoaderRegistry()); entitiesFutures.add(entitiesFuture); } return CompletableFuture.allOf(entitiesFutures.toArray(new CompletableFuture[0])) - .thenApply(v -> entitiesFutures.stream().flatMap(future -> future.join().stream()).collect(Collectors.toList())); + .thenApply( + v -> + entitiesFutures.stream() + .flatMap(future -> future.join().stream()) + .collect(Collectors.toList())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java index d44f2b77029f35..c63ec819e8f6a1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityLineageResultResolver.java @@ -1,5 +1,7 @@ package 
com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityLineageResult; @@ -17,15 +19,14 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub graph. - * Lineage relationship denotes whether an entity is directly upstream or downstream of another entity + * GraphQL Resolver responsible for fetching lineage relationships between entities in the DataHub + * graph. Lineage relationship denotes whether an entity is directly upstream or downstream of + * another entity */ @Slf4j -public class EntityLineageResultResolver implements DataFetcher> { +public class EntityLineageResultResolver + implements DataFetcher> { private final SiblingGraphService _siblingGraphService; @@ -39,38 +40,34 @@ public CompletableFuture get(DataFetchingEnvironment enviro final LineageInput input = bindArgument(environment.getArgument("input"), LineageInput.class); final LineageDirection lineageDirection = input.getDirection(); - @Nullable - final Integer start = input.getStart(); // Optional! - @Nullable - final Integer count = input.getCount(); // Optional! - @Nullable - final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! - @Nullable - final Long startTimeMillis = input.getStartTimeMillis(); // Optional! - @Nullable - final Long endTimeMillis = input.getEndTimeMillis(); // Optional! + @Nullable final Integer start = input.getStart(); // Optional! + @Nullable final Integer count = input.getCount(); // Optional! + @Nullable final Boolean separateSiblings = input.getSeparateSiblings(); // Optional! + @Nullable final Long startTimeMillis = input.getStartTimeMillis(); // Optional! 
+ @Nullable final Long endTimeMillis = input.getEndTimeMillis(); // Optional! com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - return mapEntityRelationships( - _siblingGraphService.getLineage( - Urn.createFromString(urn), - resolvedDirection, - start != null ? start : 0, - count != null ? count : 100, - 1, - separateSiblings != null ? input.getSeparateSiblings() : false, - new HashSet<>(), - startTimeMillis, - endTimeMillis)); - } catch (URISyntaxException e) { - log.error("Failed to fetch lineage for {}", urn); - throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return mapEntityRelationships( + _siblingGraphService.getLineage( + Urn.createFromString(urn), + resolvedDirection, + start != null ? start : 0, + count != null ? count : 100, + 1, + separateSiblings != null ? 
input.getSeparateSiblings() : false, + new HashSet<>(), + startTimeMillis, + endTimeMillis)); + } catch (URISyntaxException e) { + log.error("Failed to fetch lineage for {}", urn); + throw new RuntimeException(String.format("Failed to fetch lineage for {}", urn), e); + } + }); } private EntityLineageResult mapEntityRelationships( @@ -80,10 +77,10 @@ private EntityLineageResult mapEntityRelationships( result.setCount(entityLineageResult.getCount()); result.setTotal(entityLineageResult.getTotal()); result.setFiltered(entityLineageResult.getFiltered()); - result.setRelationships(entityLineageResult.getRelationships() - .stream() - .map(this::mapEntityRelationship) - .collect(Collectors.toList())); + result.setRelationships( + entityLineageResult.getRelationships().stream() + .map(this::mapEntityRelationship) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java index 43b28ef85f78ae..223548d5d62427 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityRelationshipsResultResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.load; -import com.linkedin.common.EntityRelationship; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.EntityRelationship; import com.linkedin.common.EntityRelationships; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; @@ -17,13 +18,11 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * GraphQL Resolver 
responsible for fetching relationships between entities in the DataHub graph. */ -public class EntityRelationshipsResultResolver implements DataFetcher> { +public class EntityRelationshipsResultResolver + implements DataFetcher> { private final GraphClient _graphClient; @@ -35,24 +34,22 @@ public EntityRelationshipsResultResolver(final GraphClient graphClient) { public CompletableFuture get(DataFetchingEnvironment environment) { final QueryContext context = environment.getContext(); final String urn = ((Entity) environment.getSource()).getUrn(); - final RelationshipsInput input = bindArgument(environment.getArgument("input"), RelationshipsInput.class); + final RelationshipsInput input = + bindArgument(environment.getArgument("input"), RelationshipsInput.class); final List relationshipTypes = input.getTypes(); - final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = input.getDirection(); + final com.linkedin.datahub.graphql.generated.RelationshipDirection relationshipDirection = + input.getDirection(); final Integer start = input.getStart(); // Optional! final Integer count = input.getCount(); // Optional! 
- final RelationshipDirection resolvedDirection = RelationshipDirection.valueOf(relationshipDirection.toString()); - return CompletableFuture.supplyAsync(() -> mapEntityRelationships( - fetchEntityRelationships( - urn, - relationshipTypes, - resolvedDirection, - start, - count, - context.getActorUrn() - ), - resolvedDirection - )); + final RelationshipDirection resolvedDirection = + RelationshipDirection.valueOf(relationshipDirection.toString()); + return CompletableFuture.supplyAsync( + () -> + mapEntityRelationships( + fetchEntityRelationships( + urn, relationshipTypes, resolvedDirection, start, count, context.getActorUrn()), + resolvedDirection)); } private EntityRelationships fetchEntityRelationships( @@ -68,23 +65,28 @@ private EntityRelationships fetchEntityRelationships( private EntityRelationshipsResult mapEntityRelationships( final EntityRelationships entityRelationships, - final RelationshipDirection relationshipDirection - ) { + final RelationshipDirection relationshipDirection) { final EntityRelationshipsResult result = new EntityRelationshipsResult(); result.setStart(entityRelationships.getStart()); result.setCount(entityRelationships.getCount()); result.setTotal(entityRelationships.getTotal()); - result.setRelationships(entityRelationships.getRelationships().stream().map(entityRelationship -> mapEntityRelationship( - com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf(relationshipDirection.name()), - entityRelationship) - ).collect(Collectors.toList())); + result.setRelationships( + entityRelationships.getRelationships().stream() + .map( + entityRelationship -> + mapEntityRelationship( + com.linkedin.datahub.graphql.generated.RelationshipDirection.valueOf( + relationshipDirection.name()), + entityRelationship)) + .collect(Collectors.toList())); return result; } private com.linkedin.datahub.graphql.generated.EntityRelationship mapEntityRelationship( final com.linkedin.datahub.graphql.generated.RelationshipDirection direction, final 
EntityRelationship entityRelationship) { - final com.linkedin.datahub.graphql.generated.EntityRelationship result = new com.linkedin.datahub.graphql.generated.EntityRelationship(); + final com.linkedin.datahub.graphql.generated.EntityRelationship result = + new com.linkedin.datahub.graphql.generated.EntityRelationship(); final Entity partialEntity = UrnToEntityMapper.map(entityRelationship.getEntity()); if (partialEntity != null) { result.setEntity(partialEntity); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java index 6a32e0b14e3130..d298c344240c74 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeBatchResolver.java @@ -8,31 +8,27 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Function; - /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + *

1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeBatchResolver implements DataFetcher>> { - private final List> _entityTypes; - private final Function> _entitiesProvider; + private final List> _entityTypes; + private final Function> _entitiesProvider; - public EntityTypeBatchResolver( - final List> entityTypes, - final Function> entitiesProvider - ) { - _entityTypes = entityTypes; - _entitiesProvider = entitiesProvider; - } + public EntityTypeBatchResolver( + final List> entityTypes, + final Function> entitiesProvider) { + _entityTypes = entityTypes; + _entitiesProvider = entitiesProvider; + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final List entities = _entitiesProvider.apply(environment); - return BatchLoadUtils.batchLoadEntitiesOfSameType(entities, _entityTypes, environment.getDataLoaderRegistry()); - } + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final List entities = _entitiesProvider.apply(environment); + return BatchLoadUtils.batchLoadEntitiesOfSameType( + entities, _entityTypes, environment.getDataLoaderRegistry()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java index 29d5d78e0ea96a..3c285f30661bce 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/EntityTypeResolver.java @@ -5,64 +5,65 @@ import com.linkedin.datahub.graphql.generated.Entity; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; import java.util.stream.Collectors; +import 
org.dataloader.DataLoader; /** * GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single Entity - * - * + *

1. Retrieving a single input urn. 2. Resolving a single Entity */ public class EntityTypeResolver implements DataFetcher> { - private static final List IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); - private final List> _entityTypes; - private final Function _entityProvider; + private static final List IDENTITY_FIELDS = ImmutableList.of("__typename", "urn", "type"); + private final List> _entityTypes; + private final Function _entityProvider; - public EntityTypeResolver( - final List> entityTypes, - final Function entity - ) { - _entityTypes = entityTypes; - _entityProvider = entity; - } + public EntityTypeResolver( + final List> entityTypes, + final Function entity) { + _entityTypes = entityTypes; + _entityProvider = entity; + } + private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { + return environment.getField().getSelectionSet().getSelections().stream() + .filter( + selection -> { + if (!(selection instanceof graphql.language.Field)) { + return true; + } + return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); + }) + .count() + == 0; + } - private boolean isOnlySelectingIdentityFields(DataFetchingEnvironment environment) { - return environment.getField().getSelectionSet().getSelections().stream().filter(selection -> { - if (!(selection instanceof graphql.language.Field)) { - return true; - } - return !IDENTITY_FIELDS.contains(((graphql.language.Field) selection).getName()); - }).count() == 0; + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final Entity resolvedEntity = _entityProvider.apply(environment); + if (resolvedEntity == null) { + return CompletableFuture.completedFuture(null); } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final Entity resolvedEntity = _entityProvider.apply(environment); - if (resolvedEntity == null) { - return CompletableFuture.completedFuture(null); - } - - final Object javaObject = 
_entityProvider.apply(environment); + final Object javaObject = _entityProvider.apply(environment); - if (isOnlySelectingIdentityFields(environment)) { - return CompletableFuture.completedFuture(javaObject); - } + if (isOnlySelectingIdentityFields(environment)) { + return CompletableFuture.completedFuture(javaObject); + } - final com.linkedin.datahub.graphql.types.EntityType filteredEntity = Iterables.getOnlyElement(_entityTypes.stream() + final com.linkedin.datahub.graphql.types.EntityType filteredEntity = + Iterables.getOnlyElement( + _entityTypes.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + final Object key = filteredEntity.getKeyProvider().apply(resolvedEntity); - return loader.load(key); - } + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java index 02a92544855a3a..ee2f7c3abe97dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeBatchResolver.java @@ -3,41 +3,42 @@ import com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. 
Retrieving a batch of urns. - * 2. Resolving a single {@link LoadableType}. + *

1. Retrieving a batch of urns. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *

Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. * @param the key type for the DataLoader */ public class LoadableTypeBatchResolver implements DataFetcher>> { - private final LoadableType _loadableType; - private final Function> _keyProvider; + private final LoadableType _loadableType; + private final Function> _keyProvider; - public LoadableTypeBatchResolver(final LoadableType loadableType, final Function> keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeBatchResolver( + final LoadableType loadableType, + final Function> keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture> get(DataFetchingEnvironment environment) { - final List keys = _keyProvider.apply(environment); - if (keys == null) { - return null; - } - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.loadMany(keys); + @Override + public CompletableFuture> get(DataFetchingEnvironment environment) { + final List keys = _keyProvider.apply(environment); + if (keys == null) { + return null; } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.loadMany(keys); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java index 53702f9cafe8b4..3868b1a35b64f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/LoadableTypeResolver.java @@ -3,40 +3,41 @@ import 
com.linkedin.datahub.graphql.types.LoadableType; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import org.dataloader.DataLoader; - import java.util.concurrent.CompletableFuture; import java.util.function.Function; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + *

1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *

Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. * @param the key type for the DataLoader */ public class LoadableTypeResolver implements DataFetcher> { - private final LoadableType _loadableType; - private final Function _keyProvider; + private final LoadableType _loadableType; + private final Function _keyProvider; - public LoadableTypeResolver(final LoadableType loadableType, final Function keyProvider) { - _loadableType = loadableType; - _keyProvider = keyProvider; - } + public LoadableTypeResolver( + final LoadableType loadableType, + final Function keyProvider) { + _loadableType = loadableType; + _keyProvider = keyProvider; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final K key = _keyProvider.apply(environment); - if (key == null) { - return null; - } - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); - return loader.load(key); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final K key = _keyProvider.apply(environment); + if (key == null) { + return null; } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(_loadableType.name()); + return loader.load(key); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java index a4867819a2401c..e85eaca127d625 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/OwnerTypeResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.load; +import 
com.google.common.collect.Iterables; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.OwnerType; import com.linkedin.datahub.graphql.types.LoadableType; @@ -8,38 +9,41 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.function.Function; -import org.dataloader.DataLoader; import java.util.stream.Collectors; -import com.google.common.collect.Iterables; +import org.dataloader.DataLoader; /** * Generic GraphQL resolver responsible for * - * 1. Retrieving a single input urn. - * 2. Resolving a single {@link LoadableType}. + *

1. Retrieving a single input urn. 2. Resolving a single {@link LoadableType}. * - * Note that this resolver expects that {@link DataLoader}s were registered - * for the provided {@link LoadableType} under the name provided by {@link LoadableType#name()} + *

Note that this resolver expects that {@link DataLoader}s were registered for the provided + * {@link LoadableType} under the name provided by {@link LoadableType#name()} * * @param the generated GraphQL POJO corresponding to the resolved type. */ public class OwnerTypeResolver implements DataFetcher> { - private final List> _loadableTypes; - private final Function _urnProvider; + private final List> _loadableTypes; + private final Function _urnProvider; - public OwnerTypeResolver(final List> loadableTypes, final Function urnProvider) { - _loadableTypes = loadableTypes; - _urnProvider = urnProvider; - } + public OwnerTypeResolver( + final List> loadableTypes, + final Function urnProvider) { + _loadableTypes = loadableTypes; + _urnProvider = urnProvider; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final OwnerType ownerType = _urnProvider.apply(environment); - final LoadableType filteredEntity = Iterables.getOnlyElement(_loadableTypes.stream() + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final OwnerType ownerType = _urnProvider.apply(environment); + final LoadableType filteredEntity = + Iterables.getOnlyElement( + _loadableTypes.stream() .filter(entity -> ownerType.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - final DataLoader loader = environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); - return loader.load(((Entity) ownerType).getUrn()); - } + final DataLoader loader = + environment.getDataLoaderRegistry().getDataLoader(filteredEntity.name()); + return loader.load(((Entity) ownerType).getUrn()); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index f13ebf8373e91a..0d00823697c25b 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.load; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authorization.EntitySpec; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -27,24 +29,21 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** - * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. - * The purpose of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" API - * to a single place. - * - * It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and limit arguments - * used for filtering the specific TimeSeries Aspects to be fetched. + * Generic GraphQL resolver responsible for resolving a list of TimeSeries Aspect Types. The purpose + * of this class is to consolidate the logic of calling the remote GMS "getTimeSeriesAspectValues" + * API to a single place. * - * On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping - * a generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This wil - * be invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. + *

It is expected that the query takes as input an optional startTimeMillis, endTimeMillis, and + * limit arguments used for filtering the specific TimeSeries Aspects to be fetched. * + *

On creation of a TimeSeriesAspectResolver, it is expected that a mapper capable of mapping a + * generic {@link EnvelopedAspect} to a GraphQL {@link TimeSeriesAspect} is provided. This wil be + * invoked for each {@link EnvelopedAspect} received from the GMS getTimeSeriesAspectValues API. */ @Slf4j -public class TimeSeriesAspectResolver implements DataFetcher>> { +public class TimeSeriesAspectResolver + implements DataFetcher>> { private final EntityClient _client; private final String _entityName; @@ -73,13 +72,13 @@ public TimeSeriesAspectResolver( _sort = sort; } - /** - * Check whether the actor is authorized to fetch the timeseries aspect given the resource urn - */ + /** Check whether the actor is authorized to fetch the timeseries aspect given the resource urn */ private boolean isAuthorized(QueryContext context, String urn) { - if (_entityName.equals(Constants.DATASET_ENTITY_NAME) && _aspectName.equals( - Constants.DATASET_PROFILE_ASPECT_NAME)) { - return AuthorizationUtils.isAuthorized(context, Optional.of(new EntitySpec(_entityName, urn)), + if (_entityName.equals(Constants.DATASET_ENTITY_NAME) + && _aspectName.equals(Constants.DATASET_PROFILE_ASPECT_NAME)) { + return AuthorizationUtils.isAuthorized( + context, + Optional.of(new EntitySpec(_entityName, urn)), PoliciesConfig.VIEW_DATASET_PROFILE_PRIVILEGE); } return true; @@ -87,46 +86,62 @@ private boolean isAuthorized(QueryContext context, String urn) { @Override public CompletableFuture> get(DataFetchingEnvironment environment) { - return CompletableFuture.supplyAsync(() -> { - - final QueryContext context = environment.getContext(); - // Fetch the urn, assuming the parent has an urn field. - // todo: what if the parent urn isn't projected? - final String urn = ((Entity) environment.getSource()).getUrn(); + return CompletableFuture.supplyAsync( + () -> { + final QueryContext context = environment.getContext(); + // Fetch the urn, assuming the parent has an urn field. 
+ // todo: what if the parent urn isn't projected? + final String urn = ((Entity) environment.getSource()).getUrn(); - if (!isAuthorized(context, urn)) { - return Collections.emptyList(); - } + if (!isAuthorized(context, urn)) { + return Collections.emptyList(); + } - final Long maybeStartTimeMillis = environment.getArgumentOrDefault("startTimeMillis", null); - final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); - // Max number of aspects to return. - final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); - final FilterInput maybeFilters = environment.getArgument("filter") != null - ? bindArgument(environment.getArgument("filter"), FilterInput.class) - : null; - final SortCriterion maybeSort = _sort; + final Long maybeStartTimeMillis = + environment.getArgumentOrDefault("startTimeMillis", null); + final Long maybeEndTimeMillis = environment.getArgumentOrDefault("endTimeMillis", null); + // Max number of aspects to return. + final Integer maybeLimit = environment.getArgumentOrDefault("limit", null); + final FilterInput maybeFilters = + environment.getArgument("filter") != null + ? bindArgument(environment.getArgument("filter"), FilterInput.class) + : null; + final SortCriterion maybeSort = _sort; - try { - // Step 1: Get aspects. - List aspects = - _client.getTimeseriesAspectValues(urn, _entityName, _aspectName, maybeStartTimeMillis, maybeEndTimeMillis, - maybeLimit, buildFilters(maybeFilters), maybeSort, context.getAuthentication()); + try { + // Step 1: Get aspects. + List aspects = + _client.getTimeseriesAspectValues( + urn, + _entityName, + _aspectName, + maybeStartTimeMillis, + maybeEndTimeMillis, + maybeLimit, + buildFilters(maybeFilters), + maybeSort, + context.getAuthentication()); - // Step 2: Bind profiles into GraphQL strong types. 
- return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); - } catch (RemoteInvocationException e) { - throw new RuntimeException("Failed to retrieve aspects from GMS", e); - } - }); + // Step 2: Bind profiles into GraphQL strong types. + return aspects.stream().map(_aspectMapper).collect(Collectors.toList()); + } catch (RemoteInvocationException e) { + throw new RuntimeException("Failed to retrieve aspects from GMS", e); + } + }); } private Filter buildFilters(@Nullable FilterInput maybeFilters) { if (maybeFilters == null) { return null; } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(maybeFilters.getAnd().stream() - .map(filter -> criterionFromFilter(filter, true)) - .collect(Collectors.toList()))))); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + maybeFilters.getAnd().stream() + .map(filter -> criterionFromFilter(filter, true)) + .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java index 619ca95e7d9eda..bee46f8a18cf22 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddLinkResolver.java @@ -1,7 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.linkedin.common.urn.CorpuserUrn; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import 
lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddLinkResolver implements DataFetcher> { @@ -35,41 +33,42 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw String linkLabel = input.getLabel(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn) + && !canUpdateGlossaryEntityLinks(targetUrn, environment.getContext())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { - log.debug("Adding Link. input: {}", input.toString()); + log.debug("Adding Link. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.addLink( - linkUrl, - linkLabel, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add link to resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to add link to resource with input %s", input.toString()), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.addLink(linkUrl, linkLabel, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to add link to resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to add link to resource with input %s", input.toString()), e); + } + }); } - // Returns whether this is a glossary entity and whether you can edit this glossary entity with the + // Returns whether this is a glossary entity and whether you can edit this glossary entity with + // the // Manage all children or Manage direct children privileges private boolean canUpdateGlossaryEntityLinks(Urn targetUrn, QueryContext context) { - final boolean isGlossaryEntity = targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) - || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); + final boolean isGlossaryEntity = + targetUrn.getEntityType().equals(Constants.GLOSSARY_TERM_ENTITY_NAME) + || targetUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME); if (!isGlossaryEntity) { return false; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java index 3f2dab0a5ba711..9c0d009ff9b0e8 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnerResolver implements DataFetcher> { @@ -42,28 +41,32 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw OwnerInput ownerInput = ownerInputBuilder.build(); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateAddOwnerInput(ownerInput, ownerUrn, _entityService); - try { + try { - log.debug("Adding Owner. input: {}", input); + log.debug("Adding Owner. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - ImmutableList.of(ownerInput), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owner to resource with input %s", input), e); - } - }); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + ImmutableList.of(ownerInput), + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owner to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owner to resource with input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java index 4e5b5bdb2a651d..c64b2403364c8e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddOwnersResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,9 +19,6 @@ import 
lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddOwnersResolver implements DataFetcher> { @@ -29,37 +27,37 @@ public class AddOwnersResolver implements DataFetcher @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final AddOwnersInput input = bindArgument(environment.getArgument("input"), AddOwnersInput.class); + final AddOwnersInput input = + bindArgument(environment.getArgument("input"), AddOwnersInput.class); List owners = input.getOwners(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - OwnerUtils.validateAddOwnerInput( - owners, - targetUrn, - _entityService - ); - try { - - log.debug("Adding Owners. input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.addOwnersToResources( - owners, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); - throw new RuntimeException(String.format("Failed to add owners to resource with input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + OwnerUtils.validateAddOwnerInput(owners, targetUrn, _entityService); + try { + + log.debug("Adding Owners. 
input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.addOwnersToResources( + owners, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to add owners to resource with input {}, {}", input, e.getMessage()); + throw new RuntimeException( + String.format("Failed to add owners to resource with input %s", input), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java index 78d2341492b398..f4e3f7ed49056f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -17,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagResolver implements DataFetcher> { @@ -27,44 +25,54 @@ public class AddTagResolver implements DataFetcher> { @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input 
= + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { - if (!tagUrn.getEntityType().equals("tag")) { - log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals("tag")) { + log.error("Failed to add {}. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.info("Adding Tag. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info("Adding Tag. input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java index 7174f3edffee67..4135e774172c9d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTagsResolver.java @@ -1,8 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; - import com.linkedin.common.urn.Urn; 
import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,9 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class AddTagsResolver implements DataFetcher> { @@ -32,40 +30,47 @@ public class AddTagsResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final AddTagsInput input = bindArgument(environment.getArgument("input"), AddTagsInput.class); - List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - tagUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - false - ); - try { - log.info("Adding Tags. 
input: {}", input.toString()); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTagsToResources( - tagUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + LabelUtils.validateResourceAndLabel( + tagUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + false); + try { + log.info("Adding Tags. input: {}", input.toString()); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTagsToResources( + tagUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java index 056b5db4324c34..a776fda558a428 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,8 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermResolver implements DataFetcher> { @@ -25,39 +25,49 @@ public class AddTermResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java index 2f58b6b09e681b..4fbe74a0349b49 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/AddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ 
-19,8 +21,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class AddTermsResolver implements DataFetcher> { @@ -29,41 +29,48 @@ public class AddTermsResolver implements DataFetcher> @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final AddTermsInput input = bindArgument(environment.getArgument("input"), AddTermsInput.class); - List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - return CompletableFuture.supplyAsync(() -> { - - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - LabelUtils.validateResourceAndLabel( - termUrns, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - false - ); + LabelUtils.validateResourceAndLabel( + termUrns, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + false); - try { - log.info("Adding Term. 
input: {}", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.addTermsToResources( - termUrns, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + log.info("Adding Term. input: {}", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.addTermsToResources( + termUrns, + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java index 5beaeecae673f0..94182835de159a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ 
import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddOwnersResolver implements DataFetcher> { @@ -29,26 +28,30 @@ public class BatchAddOwnersResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final BatchAddOwnersInput input = bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); + final BatchAddOwnersInput input = + bindArgument(environment.getArgument("input"), BatchAddOwnersInput.class); final List owners = input.getOwners(); final List resources = input.getResources(); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateOwners(owners); - validateInputResources(resources, context); + // First, validate the batch + validateOwners(owners); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchAddOwners(owners, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + try { + // Then execute the bulk add + batchAddOwners(owners, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } private void validateOwners(List owners) { @@ -67,23 +70,32 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new 
IllegalArgumentException("Malformed input provided: owners cannot be applied to subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be applied to subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddOwners(List owners, List resources, QueryContext context) { + private void batchAddOwners( + List owners, List resources, QueryContext context) { log.debug("Batch adding owners. owners: {}, resources: {}", owners, resources); try { - OwnerUtils.addOwnersToResources(owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.addOwnersToResources( + owners, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Owners %s to resources with urns %s!", - owners, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Owners %s to resources with urns %s!", + owners, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java index 9c5cddb3c50bca..239ada16536952 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTagsResolver implements DataFetcher> { @@ -36,62 +33,64 @@ public class BatchAddTagsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTagsInput input = bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); - final List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTagsInput input = + bindArgument(environment.getArgument("input"), BatchAddTagsInput.class); + final List tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTags(tagUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - 
return handleAddTagsToSingleSchemaField(context, resources, tagUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // First, validate the batch + validateTags(tagUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTagsToSingleSchemaField(context, resources, tagUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding tags to a schema field in the UI, there's a chance the parent entity has siblings. - * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * tag to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the tag to one of its siblings. If that fails, keep trying all siblings until one passes or all + * fail. Then we throw if none succeed. 
*/ private Boolean handleAddTagsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List resources, - @Nonnull final List tagUrns - ) { + @Nonnull final List tagUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, new HashSet<>(), siblingUrns); } /** - * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. - * Try adding until we attempt all siblings or one passes. Throw if none pass. + * Attempts to add tags to a schema field, and if it fails, try adding to one of its siblings. Try + * adding until we attempt all siblings or one passes. Throw if none pass. */ private Boolean attemptBatchAddTagsWithSiblings( @Nonnull final List tagUrns, @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTagsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add tags for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTagsWithSiblings(tagUrns, resource, context, attemptedUrns, siblingUrns); + return attemptBatchAddTagsWithSiblings( + tagUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed 
to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTags(List tagUrns, List resources, QueryContext context) { - log.debug("Batch adding Tags. tags: {}, resources: {}", resources, tagUrns); - try { - LabelUtils.addTagsToResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + private void batchAddTags( + List tagUrns, List resources, QueryContext context) { + log.debug("Batch adding Tags. 
tags: {}, resources: {}", resources, tagUrns); + try { + LabelUtils.addTagsToResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to batch add Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); - } + } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java index a46f37b110f4eb..b6d799c13345db 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchAddTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -12,21 +14,16 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAddTermsResolver implements DataFetcher> { @@ -36,49 +33,52 @@ public class BatchAddTermsResolver implements DataFetcher get(DataFetchingEnvironment 
environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchAddTermsInput input = bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchAddTermsInput input = + bindArgument(environment.getArgument("input"), BatchAddTermsInput.class); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { - - // First, validate the batch - validateTerms(termUrns); - - if (resources.size() == 1 && resources.get(0).getSubResource() != null) { - return handleAddTermsToSingleSchemaField(context, resources, termUrns); - } - - validateInputResources(resources, context); - - try { - // Then execute the bulk add - batchAddTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // First, validate the batch + validateTerms(termUrns); + + if (resources.size() == 1 && resources.get(0).getSubResource() != null) { + return handleAddTermsToSingleSchemaField(context, resources, termUrns); + } + + validateInputResources(resources, context); + + try { + // Then execute the bulk add + batchAddTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } /** * When adding terms to a schema field in the UI, there's a chance the parent entity has siblings. 
- * If the given urn doesn't have a schema or doesn't have the given column, we should try to add the - * term to one of its siblings. If that fails, keep trying all siblings until one passes or all fail. - * Then we throw if none succeed. + * If the given urn doesn't have a schema or doesn't have the given column, we should try to add + * the term to one of its siblings. If that fails, keep trying all siblings until one passes or + * all fail. Then we throw if none succeed. */ private Boolean handleAddTermsToSingleSchemaField( @Nonnull final QueryContext context, @Nonnull final List resources, - @Nonnull final List termUrns - ) { + @Nonnull final List termUrns) { final ResourceRefInput resource = resources.get(0); final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); final List siblingUrns = SiblingsUtils.getSiblingUrns(resourceUrn, _entityService); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, new HashSet<>(), siblingUrns); + return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, new HashSet<>(), siblingUrns); } /** @@ -90,8 +90,7 @@ private Boolean attemptBatchAddTermsWithSiblings( @Nonnull final ResourceRefInput resource, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(UrnUtils.getUrn(resource.getResourceUrn())); final List resources = new ArrayList<>(); resources.add(resource); @@ -106,13 +105,19 @@ private Boolean attemptBatchAddTermsWithSiblings( if (siblingUrn.isPresent()) { log.warn( "Failed to add terms for resourceUrn {} and subResource {}, trying sibling urn {} now.", - resource.getResourceUrn(), resource.getSubResource(), siblingUrn.get() - ); + resource.getResourceUrn(), + resource.getSubResource(), + siblingUrn.get()); resource.setResourceUrn(siblingUrn.get().toString()); - return attemptBatchAddTermsWithSiblings(termUrns, resource, context, attemptedUrns, siblingUrns); + 
return attemptBatchAddTermsWithSiblings( + termUrns, resource, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against resource {}, {}", resource.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against resource %s", resource.toString()), e); + log.error( + "Failed to perform update against resource {}, {}", + resource.toString(), + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against resource %s", resource.toString()), e); } } } @@ -132,20 +137,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchAddTerms(List termUrns, List resources, QueryContext context) { + private void batchAddTerms( + List termUrns, List resources, QueryContext context) { log.debug("Batch adding Terms. 
terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.addTermsToResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.addTermsToResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch add Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch add Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java index debd68646910f2..30e04ac36ee0f7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveOwnersResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveOwnersResolver implements DataFetcher> { @@ -29,27 +28,33 @@ public class BatchRemoveOwnersResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final BatchRemoveOwnersInput input = 
bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); + final BatchRemoveOwnersInput input = + bindArgument(environment.getArgument("input"), BatchRemoveOwnersInput.class); final List owners = input.getOwnerUrns(); final List resources = input.getResources(); - final Optional maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + final Optional maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk remove - batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk remove + batchRemoveOwners(owners, maybeOwnershipTypeUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -62,26 +67,40 @@ private void validateInputResource(ResourceRefInput resource, QueryContext conte final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (resource.getSubResource() != null) { - throw new 
IllegalArgumentException("Malformed input provided: owners cannot be removed from subresources."); + throw new IllegalArgumentException( + "Malformed input provided: owners cannot be removed from subresources."); } if (!OwnerUtils.isAuthorizedToUpdateOwners(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveOwners(List ownerUrns, Optional maybeOwnershipTypeUrn, - List resources, QueryContext context) { + private void batchRemoveOwners( + List ownerUrns, + Optional maybeOwnershipTypeUrn, + List resources, + QueryContext context) { log.debug("Batch removing owners. 
owners: {}, resources: {}", ownerUrns, resources); try { - OwnerUtils.removeOwnersFromResources(ownerUrns.stream().map(UrnUtils::getUrn).collect( - Collectors.toList()), maybeOwnershipTypeUrn, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + OwnerUtils.removeOwnersFromResources( + ownerUrns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()), + maybeOwnershipTypeUrn, + resources, + UrnUtils.getUrn(context.getActorUrn()), + _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch remove Owners %s to resources with urns %s!", - ownerUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch remove Owners %s to resources with urns %s!", + ownerUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java index ab432f0afcaec0..7500f29a0c67fc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTagsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTagsResolver implements 
DataFetcher> { @@ -28,26 +27,29 @@ public class BatchRemoveTagsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTagsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); - final List tagUrns = input.getTagUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTagsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTagsInput.class); + final List tagUrns = + input.getTagUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTags(tagUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchRemoveTags(tagUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if 
(!LabelUtils.isAuthorizedToUpdateTags(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTags(List tagUrns, List resources, QueryContext context) { + private void batchRemoveTags( + List tagUrns, List resources, QueryContext context) { log.debug("Batch removing Tags. tags: {}, resources: {}", resources, tagUrns); try { - LabelUtils.removeTagsFromResources(tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTagsFromResources( + tagUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Tags %s to resources with urns %s!", - tagUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Tags %s to resources with urns %s!", + tagUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java index c8870cc44bf9e6..3706e4e911b174 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchRemoveTermsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchRemoveTermsResolver implements DataFetcher> { @@ -28,26 +27,29 @@ public class BatchRemoveTermsResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchRemoveTermsInput input = bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); - final List termUrns = input.getTermUrns().stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final BatchRemoveTermsInput input = + bindArgument(environment.getArgument("input"), BatchRemoveTermsInput.class); + final List termUrns = + input.getTermUrns().stream().map(UrnUtils::getUrn).collect(Collectors.toList()); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the batch - validateInputResources(resources, context); + // First, validate the batch + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchRemoveTerms(termUrns, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + 
batchRemoveTerms(termUrns, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -59,20 +61,28 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!LabelUtils.isAuthorizedToUpdateTerms(context, resourceUrn, resource.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchRemoveTerms(List termUrns, List resources, QueryContext context) { + private void batchRemoveTerms( + List termUrns, List resources, QueryContext context) { log.debug("Batch removing Terms. 
terms: {}, resources: {}", resources, termUrns); try { - LabelUtils.removeTermsFromResources(termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); + LabelUtils.removeTermsFromResources( + termUrns, resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to remove Terms %s to resources with urns %s!", - termUrns, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to remove Terms %s to resources with urns %s!", + termUrns, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java index 9b6167c673d8db..551878371b4897 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchSetDomainResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchSetDomainResolver implements DataFetcher> { @@ -30,25 +29,29 @@ public class BatchSetDomainResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = 
environment.getContext(); - final BatchSetDomainInput input = bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); + final BatchSetDomainInput input = + bindArgument(environment.getArgument("input"), BatchSetDomainInput.class); final String maybeDomainUrn = input.getDomainUrn(); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the domain - validateDomain(maybeDomainUrn); - validateInputResources(resources, context); + // First, validate the domain + validateDomain(maybeDomainUrn); + validateInputResources(resources, context); - try { - // Then execute the bulk add - batchSetDomains(maybeDomainUrn, resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk add + batchSetDomains(maybeDomainUrn, resources, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } private void validateDomain(@Nullable String maybeDomainUrn) { @@ -66,23 +69,31 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DomainUtils.isAuthorizedToUpdateDomainsForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchSetDomains(String maybeDomainUrn, List resources, QueryContext context) { + private void batchSetDomains( + String maybeDomainUrn, List resources, QueryContext context) { log.debug("Batch adding Domains. domainUrn: {}, resources: {}", maybeDomainUrn, resources); try { - DomainUtils.setDomainForResources(maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), + DomainUtils.setDomainForResources( + maybeDomainUrn == null ? null : UrnUtils.getUrn(maybeDomainUrn), resources, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch set Domain %s to resources with urns %s!", - maybeDomainUrn, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch set Domain %s to resources with urns %s!", + maybeDomainUrn, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java index 5961dc9087a638..e76617d119621a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateDeprecationResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateDeprecationResolver implements DataFetcher> { @@ -30,23 +29,32 @@ public class BatchUpdateDeprecationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateDeprecationInput input = bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); + final BatchUpdateDeprecationInput input = + bindArgument(environment.getArgument("input"), BatchUpdateDeprecationInput.class); final List resources = input.getResources(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the resources - validateInputResources(resources, context); + // First, validate the resources + validateInputResources(resources, context); - try { - // Then execute the bulk update - batchUpdateDeprecation(input.getDeprecated(), input.getNote(), input.getDecommissionTime(), resources, context); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk update + batchUpdateDeprecation( + input.getDeprecated(), + input.getNote(), + input.getDecommissionTime(), + resources, + context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to 
perform update against input %s", input.toString()), e); + } + }); } private void validateInputResources(List resources, QueryContext context) { @@ -58,17 +66,25 @@ private void validateInputResources(List resources, QueryConte private void validateInputResource(ResourceRefInput resource, QueryContext context) { final Urn resourceUrn = UrnUtils.getUrn(resource.getResourceUrn()); if (!DeprecationUtils.isAuthorizedToUpdateDeprecationForEntity(context, resourceUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - LabelUtils.validateResource(resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); + LabelUtils.validateResource( + resourceUrn, resource.getSubResource(), resource.getSubResourceType(), _entityService); } - private void batchUpdateDeprecation(boolean deprecated, + private void batchUpdateDeprecation( + boolean deprecated, @Nullable String note, @Nullable Long decommissionTime, List resources, QueryContext context) { - log.debug("Batch updating deprecation. deprecated: {}, note: {}, decommissionTime: {}, resources: {}", deprecated, note, decommissionTime, resources); + log.debug( + "Batch updating deprecation. 
deprecated: {}, note: {}, decommissionTime: {}, resources: {}", + deprecated, + note, + decommissionTime, + resources); try { DeprecationUtils.updateDeprecationForResources( deprecated, @@ -78,10 +94,14 @@ private void batchUpdateDeprecation(boolean deprecated, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to batch update deprecated to %s for resources with urns %s!", - deprecated, - resources.stream().map(ResourceRefInput::getResourceUrn).collect(Collectors.toList())), + throw new RuntimeException( + String.format( + "Failed to batch update deprecated to %s for resources with urns %s!", + deprecated, + resources.stream() + .map(ResourceRefInput::getResourceUrn) + .collect(Collectors.toList())), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java index 69b2b92fb9ccaf..5a25e6d83e648a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/BatchUpdateSoftDeletedResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -14,9 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchUpdateSoftDeletedResolver implements DataFetcher> { @@ -26,24 +25,32 @@ public class BatchUpdateSoftDeletedResolver implements DataFetcher 
get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final BatchUpdateSoftDeletedInput input = bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); + final BatchUpdateSoftDeletedInput input = + bindArgument(environment.getArgument("input"), BatchUpdateSoftDeletedInput.class); final List urns = input.getUrns(); final boolean deleted = input.getDeleted(); - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { - // First, validate the entities exist - validateInputUrns(urns, context); + // First, validate the entities exist + validateInputUrns(urns, context); - try { - // Then execute the bulk soft delete - batchUpdateSoftDeleted(deleted, urns, context); - return true; - } catch (Exception e) { - log.error("Failed to perform batch soft delete against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform batch soft delete against input %s", input.toString()), e); - } - }); + try { + // Then execute the bulk soft delete + batchUpdateSoftDeleted(deleted, urns, context); + return true; + } catch (Exception e) { + log.error( + "Failed to perform batch soft delete against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform batch soft delete against input %s", input.toString()), + e); + } + }); } private void validateInputUrns(List urnStrs, QueryContext context) { @@ -55,10 +62,12 @@ private void validateInputUrns(List urnStrs, QueryContext context) { private void validateInputUrn(String urnStr, QueryContext context) { final Urn urn = UrnUtils.getUrn(urnStr); if (!DeleteUtils.isAuthorizedToDeleteEntity(context, urn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } if (!_entityService.exists(urn)) { - throw new IllegalArgumentException(String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); + throw new IllegalArgumentException( + String.format("Failed to soft delete entity with urn %s. Entity does not exist.", urn)); } } @@ -66,14 +75,12 @@ private void batchUpdateSoftDeleted(boolean removed, List urnStrs, Query log.debug("Batch soft deleting assets. urns: {}", urnStrs); try { DeleteUtils.updateStatusForResources( - removed, - urnStrs, - UrnUtils.getUrn(context.getActorUrn()), - _entityService); + removed, urnStrs, UrnUtils.getUrn(context.getActorUrn()), _entityService); } catch (Exception e) { throw new RuntimeException( - String.format("Failed to batch update soft deleted status entities with urns %s!", urnStrs), + String.format( + "Failed to batch update soft deleted status entities with urns %s!", urnStrs), e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java index 59d5d6939c04c8..d0796389d22808 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/DescriptionUtils.java @@ -1,13 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.container.EditableContainerProperties; import com.linkedin.datahub.graphql.QueryContext; import 
com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.dataproduct.DataProductProperties; import com.linkedin.domain.DomainProperties; @@ -30,148 +31,191 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DescriptionUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DescriptionUtils() { } + private DescriptionUtils() {} public static void updateFieldDescription( String newDescription, Urn resourceUrn, String fieldPath, Urn actor, - EntityService entityService - ) { - EditableSchemaMetadata editableSchemaMetadata = - (EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); - - editableFieldInfo.setDescription(newDescription); - - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + EntityService entityService) { + EditableSchemaMetadata editableSchemaMetadata = + (EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + 
getFieldInfoFromSchema(editableSchemaMetadata, fieldPath); + + editableFieldInfo.setDescription(newDescription); + + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } public static void updateContainerDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { EditableContainerProperties containerProperties = - (EditableContainerProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableContainerProperties()); + (EditableContainerProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableContainerProperties()); containerProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, containerProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + containerProperties, + actor, + entityService); } public static void updateDomainDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { DomainProperties domainProperties = - (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, entityService, null); + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + entityService, + null); if (domainProperties == null) { - // If there are no properties for the domain already, then we should throw since the properties model also requires a name. 
+ // If there are no properties for the domain already, then we should throw since the + // properties model also requires a name. throw new IllegalArgumentException("Properties for this Domain do not yet exist!"); } domainProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + entityService); } public static void updateTagDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { TagProperties tagProperties = - (TagProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); + (TagProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), Constants.TAG_PROPERTIES_ASPECT_NAME, entityService, null); if (tagProperties == null) { - // If there are no properties for the tag already, then we should throw since the properties model also requires a name. + // If there are no properties for the tag already, then we should throw since the properties + // model also requires a name. 
throw new IllegalArgumentException("Properties for this Tag do not yet exist!"); } tagProperties.setDescription(newDescription); - persistAspect(resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); + persistAspect( + resourceUrn, Constants.TAG_PROPERTIES_ASPECT_NAME, tagProperties, actor, entityService); } public static void updateCorpGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { CorpGroupEditableInfo corpGroupEditableInfo = - (CorpGroupEditableInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, entityService, new CorpGroupEditableInfo()); + (CorpGroupEditableInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + entityService, + new CorpGroupEditableInfo()); if (corpGroupEditableInfo != null) { corpGroupEditableInfo.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, corpGroupEditableInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + corpGroupEditableInfo, + actor, + entityService); } public static void updateGlossaryTermDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + entityService, + null); if (glossaryTermInfo == null) { - // If there are no properties for the 
term already, then we should throw since the properties model also requires a name. + // If there are no properties for the term already, then we should throw since the properties + // model also requires a name. throw new IllegalArgumentException("Properties for this Glossary Term do not yet exist!"); } - glossaryTermInfo.setDefinition(newDescription); // We call description 'definition' for glossary terms. Not great, we know. :( - persistAspect(resourceUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, entityService); + glossaryTermInfo.setDefinition( + newDescription); // We call description 'definition' for glossary terms. Not great, we know. + // :( + persistAspect( + resourceUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + entityService); } public static void updateGlossaryNodeDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setDefinition(newDescription); - persistAspect(resourceUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, entityService); + persistAspect( + resourceUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + entityService); } public static void updateNotebookDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableNotebookProperties notebookProperties = 
(EditableNotebookProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, entityService, null); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableNotebookProperties notebookProperties = + (EditableNotebookProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + entityService, + null); if (notebookProperties != null) { notebookProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, notebookProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + notebookProperties, + actor, + entityService); } public static Boolean validateFieldDescriptionInput( Urn resourceUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); @@ -179,51 +223,41 @@ public static Boolean validateFieldDescriptionInput( return true; } - public static Boolean validateDomainInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateDomainInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. 
%s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateContainerInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateContainerInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateLabelInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateLabelInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", resourceUrn, resourceUrn)); } return true; } - public static Boolean validateCorpGroupInput( - Urn corpUserUrn, - EntityService entityService - ) { + public static Boolean validateCorpGroupInput(Urn corpUserUrn, EntityService entityService) { if (!entityService.exists(corpUserUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", corpUserUrn, corpUserUrn)); } return true; } - public static Boolean validateNotebookInput( - Urn notebookUrn, - EntityService entityService) { + public static Boolean validateNotebookInput(Urn notebookUrn, EntityService entityService) { if (!entityService.exists(notebookUrn)) { throw new IllegalArgumentException( String.format("Failed to update %s. 
%s does not exist.", notebookUrn, notebookUrn)); @@ -231,11 +265,15 @@ public static Boolean validateNotebookInput( return true; } - public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateFieldDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -245,11 +283,14 @@ public static boolean isAuthorizedToUpdateFieldDescription(@Nonnull QueryContext orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -259,25 +300,31 @@ public static boolean isAuthorizedToUpdateDomainDescription(@Nonnull QueryContex orPrivilegeGroups); } - public static boolean 
isAuthorizedToUpdateContainerDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); - - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getActorUrn(), - targetUrn.getEntityType(), - targetUrn.toString(), - orPrivilegeGroups); - } + public static boolean isAuthorizedToUpdateContainerDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); + + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getActorUrn(), + targetUrn.getEntityType(), + targetUrn.toString(), + orPrivilegeGroups); + } - public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext context, Urn targetUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDescription( + @Nonnull QueryContext context, Urn targetUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -288,79 +335,122 @@ public static boolean isAuthorizedToUpdateDescription(@Nonnull QueryContext cont } public static void updateMlModelDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - 
EntityService entityService) { - EditableMLModelProperties editableProperties = (EditableMLModelProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelProperties editableProperties = + (EditableMLModelProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlModelGroupDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLModelGroupProperties editableProperties = (EditableMLModelGroupProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLModelGroupProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLModelGroupProperties editableProperties = + (EditableMLModelGroupProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLModelGroupProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + 
Constants.ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } + public static void updateMlFeatureDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureProperties editableProperties = (EditableMLFeatureProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureProperties editableProperties = + (EditableMLFeatureProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlFeatureTableDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLFeatureTableProperties editableProperties = (EditableMLFeatureTableProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLFeatureTableProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLFeatureTableProperties editableProperties = + (EditableMLFeatureTableProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLFeatureTableProperties()); if (editableProperties != null) { 
editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateMlPrimaryKeyDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - EditableMLPrimaryKeyProperties editableProperties = (EditableMLPrimaryKeyProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, entityService, new EditableMLPrimaryKeyProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + EditableMLPrimaryKeyProperties editableProperties = + (EditableMLPrimaryKeyProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + entityService, + new EditableMLPrimaryKeyProperties()); if (editableProperties != null) { editableProperties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, editableProperties, actor, entityService); + persistAspect( + resourceUrn, + Constants.ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, + editableProperties, + actor, + entityService); } public static void updateDataProductDescription( - String newDescription, - Urn resourceUrn, - Urn actor, - EntityService entityService) { - DataProductProperties properties = (DataProductProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, entityService, new DataProductProperties()); + String newDescription, Urn resourceUrn, Urn actor, EntityService entityService) { + DataProductProperties properties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), 
+ Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + entityService, + new DataProductProperties()); if (properties != null) { properties.setDescription(newDescription); } - persistAspect(resourceUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, properties, actor, entityService); + persistAspect( + resourceUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + properties, + actor, + entityService); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java index e5e3a5a0ee42e3..e4c5c132be4f7d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MoveDomainResolver.java @@ -19,11 +19,10 @@ import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - @Slf4j @RequiredArgsConstructor public class MoveDomainResolver implements DataFetcher> { @@ -33,57 +32,78 @@ public class MoveDomainResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final MoveDomainInput input = ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); + final MoveDomainInput input = + ResolverUtils.bindArgument(environment.getArgument("input"), MoveDomainInput.class); final QueryContext context = environment.getContext(); final Urn resourceUrn = UrnUtils.getUrn(input.getResourceUrn()); - final Urn newParentDomainUrn = input.getParentDomain() != null ? UrnUtils.getUrn(input.getParentDomain()) : null; + final Urn newParentDomainUrn = + input.getParentDomain() != null ? 
UrnUtils.getUrn(input.getParentDomain()) : null; - return CompletableFuture.supplyAsync(() -> { - if (!AuthorizationUtils.canManageDomains(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canManageDomains(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - try { - if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Resource is not a domain."); - } + try { + if (!resourceUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Resource is not a domain."); + } - DomainProperties properties = (DomainProperties) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, - null - ); + DomainProperties properties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); - if (properties == null) { - throw new IllegalArgumentException("Domain properties do not exist."); - } + if (properties == null) { + throw new IllegalArgumentException("Domain properties do not exist."); + } - if (newParentDomainUrn != null) { - if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { - throw new IllegalArgumentException("Parent entity is not a domain."); - } - if (!_entityService.exists(newParentDomainUrn)) { - throw new IllegalArgumentException("Parent entity does not exist."); - } - } + if (newParentDomainUrn != null) { + if (!newParentDomainUrn.getEntityType().equals(Constants.DOMAIN_ENTITY_NAME)) { + throw new IllegalArgumentException("Parent entity is not a domain."); + } + if (!_entityService.exists(newParentDomainUrn)) { + throw new 
IllegalArgumentException("Parent entity does not exist."); + } + } - if (DomainUtils.hasNameConflict(properties.getName(), newParentDomainUrn, context, _entityClient)) { - throw new DataHubGraphQLException( - String.format("\"%s\" already exists in the destination domain. Please pick a unique name.", properties.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); - } + if (DomainUtils.hasNameConflict( + properties.getName(), newParentDomainUrn, context, _entityClient)) { + throw new DataHubGraphQLException( + String.format( + "\"%s\" already exists in the destination domain. Please pick a unique name.", + properties.getName()), + DataHubGraphQLErrorCode.CONFLICT); + } - properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - MutationUtils.persistAspect(resourceUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, properties, actor, _entityService); - return true; - } catch (DataHubGraphQLException e) { - throw e; - } catch (Exception e) { - log.error("Failed to move domain {} to parent {} : {}", input.getResourceUrn(), input.getParentDomain(), e.getMessage()); - throw new RuntimeException(String.format("Failed to move domain %s to %s", input.getResourceUrn(), input.getParentDomain()), e); - } - }); + properties.setParentDomain(newParentDomainUrn, SetMode.REMOVE_IF_NULL); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + MutationUtils.persistAspect( + resourceUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + properties, + actor, + _entityService); + return true; + } catch (DataHubGraphQLException e) { + throw e; + } catch (Exception e) { + log.error( + "Failed to move domain {} to parent {} : {}", + input.getResourceUrn(), + input.getParentDomain(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to move domain %s to %s", + input.getResourceUrn(), input.getParentDomain()), + e); + } + }); } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java index 30bd940a7dfed4..064b532a792c19 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolver.java @@ -1,20 +1,18 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.codahale.metrics.Timer; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.metadata.utils.metrics.MetricUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Generic GraphQL resolver responsible for performing updates against particular types. 
* @@ -23,7 +21,8 @@ */ public class MutableTypeBatchResolver implements DataFetcher>> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeBatchResolver.class.getName()); private final BatchMutableType _batchMutableType; @@ -33,21 +32,23 @@ public MutableTypeBatchResolver(final BatchMutableType batchMutableType @Override public CompletableFuture> get(DataFetchingEnvironment environment) throws Exception { - final B[] input = bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); - - return CompletableFuture.supplyAsync(() -> { - Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); - - try { - return _batchMutableType.batchUpdate(input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error("Failed to perform batchUpdate", e); - throw new IllegalArgumentException(e); - } finally { - timer.stop(); - } - }); + final B[] input = + bindArgument(environment.getArgument("input"), _batchMutableType.batchInputClass()); + + return CompletableFuture.supplyAsync( + () -> { + Timer.Context timer = MetricUtils.timer(this.getClass(), "batchMutate").time(); + + try { + return _batchMutableType.batchUpdate(input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error("Failed to perform batchUpdate", e); + throw new IllegalArgumentException(e); + } finally { + timer.stop(); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java index 115a68e808de6c..c62282c9065971 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.types.MutableType; import graphql.schema.DataFetcher; @@ -8,8 +10,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** * Generic GraphQL resolver responsible for performing updates against particular types. * @@ -18,28 +18,34 @@ */ public class MutableTypeResolver implements DataFetcher> { - private static final Logger _logger = LoggerFactory.getLogger(MutableTypeResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(MutableTypeResolver.class.getName()); - private final MutableType _mutableType; + private final MutableType _mutableType; - public MutableTypeResolver(final MutableType mutableType) { - _mutableType = mutableType; - } + public MutableTypeResolver(final MutableType mutableType) { + _mutableType = mutableType; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final String urn = environment.getArgument("urn"); - final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug(String.format("Mutating entity. 
input: %s", input)); - return _mutableType.update(urn, input, environment.getContext()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - _logger.error(String.format("Failed to perform update against input %s", input) + " " + e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } + @Override + public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + final String urn = environment.getArgument("urn"); + final I input = bindArgument(environment.getArgument("input"), _mutableType.inputClass()); + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug(String.format("Mutating entity. input: %s", input)); + return _mutableType.update(urn, input, environment.getContext()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + _logger.error( + String.format("Failed to perform update against input %s", input) + + " " + + e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } }); - } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java index c862fcfa83594e..4a915b2a477cbc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.StringMap; @@ -19,49 +21,56 @@ import java.util.Optional; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - @Slf4j 
public class MutationUtils { - private MutationUtils() { } + private MutationUtils() {} - public static void persistAspect(Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); + public static void persistAspect( + Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn(urn, aspectName, aspect); entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param urn * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithUrn(Urn urn, String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithUrn( + Urn urn, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityUrn(urn); return setProposalProperties(proposal, urn.getEntityType(), aspectName, aspect); } /** - * Only intended for use from GraphQL mutations, executes a different flow indicating a request sourced from the UI + * Only intended for use from GraphQL mutations, executes a different flow indicating a request + * sourced from the UI + * * @param entityKey * @param entityType * @param aspectName * @param aspect * @return */ - public static MetadataChangeProposal buildMetadataChangeProposalWithKey(RecordTemplate entityKey, String entityType, - String aspectName, RecordTemplate aspect) { + public static MetadataChangeProposal buildMetadataChangeProposalWithKey( + RecordTemplate entityKey, String 
entityType, String aspectName, RecordTemplate aspect) { final MetadataChangeProposal proposal = new MetadataChangeProposal(); proposal.setEntityKeyAspect(GenericRecordUtils.serializeAspect(entityKey)); return setProposalProperties(proposal, entityType, aspectName, aspect); } - private static MetadataChangeProposal setProposalProperties(MetadataChangeProposal proposal, - String entityType, String aspectName, RecordTemplate aspect) { + private static MetadataChangeProposal setProposalProperties( + MetadataChangeProposal proposal, + String entityType, + String aspectName, + RecordTemplate aspect) { proposal.setEntityType(entityType); proposal.setAspectName(aspectName); proposal.setAspect(GenericRecordUtils.serializeAspect(aspect)); @@ -77,18 +86,16 @@ private static MetadataChangeProposal setProposalProperties(MetadataChangePropos } public static EditableSchemaFieldInfo getFieldInfoFromSchema( - EditableSchemaMetadata editableSchemaMetadata, - String fieldPath - ) { + EditableSchemaMetadata editableSchemaMetadata, String fieldPath) { if (!editableSchemaMetadata.hasEditableSchemaFieldInfo()) { editableSchemaMetadata.setEditableSchemaFieldInfo(new EditableSchemaFieldInfoArray()); } EditableSchemaFieldInfoArray editableSchemaMetadataArray = editableSchemaMetadata.getEditableSchemaFieldInfo(); - Optional fieldMetadata = editableSchemaMetadataArray - .stream() - .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) - .findFirst(); + Optional fieldMetadata = + editableSchemaMetadataArray.stream() + .filter(fieldInfo -> fieldInfo.getFieldPath().equals(fieldPath)) + .findFirst(); if (fieldMetadata.isPresent()) { return fieldMetadata.get(); @@ -104,34 +111,37 @@ public static Boolean validateSubresourceExists( Urn targetUrn, String subResource, SubResourceType subResourceType, - EntityService entityService - ) { + EntityService entityService) { if (subResourceType.equals(SubResourceType.DATASET_FIELD)) { - SchemaMetadata schemaMetadata = (SchemaMetadata) 
entityService.getAspect(targetUrn, - Constants.SCHEMA_METADATA_ASPECT_NAME, 0); + SchemaMetadata schemaMetadata = + (SchemaMetadata) + entityService.getAspect(targetUrn, Constants.SCHEMA_METADATA_ASPECT_NAME, 0); if (schemaMetadata == null) { throw new IllegalArgumentException( - String.format("Failed to update %s & field %s. %s has no schema.", targetUrn, subResource, targetUrn) - ); + String.format( + "Failed to update %s & field %s. %s has no schema.", + targetUrn, subResource, targetUrn)); } Optional fieldMatch = - schemaMetadata.getFields().stream().filter(field -> field.getFieldPath().equals(subResource)).findFirst(); + schemaMetadata.getFields().stream() + .filter(field -> field.getFieldPath().equals(subResource)) + .findFirst(); if (!fieldMatch.isPresent()) { - throw new IllegalArgumentException(String.format( - "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", - targetUrn, subResource, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to update %s & field %s. Field %s does not exist in the datasets schema.", + targetUrn, subResource, subResource)); } return true; } - throw new IllegalArgumentException(String.format( - "Failed to update %s. SubResourceType (%s) is not valid. Types supported: %s.", - targetUrn, subResource, SubResourceType.values() - )); + throw new IllegalArgumentException( + String.format( + "Failed to update %s. SubResourceType (%s) is not valid. 
Types supported: %s.", + targetUrn, subResource, SubResourceType.values())); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java index 23c08043af5d3c..f84d1b3a66f6f8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveLinkResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,9 +15,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveLinkResolver implements DataFetcher> { @@ -24,36 +23,38 @@ public class RemoveLinkResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final RemoveLinkInput input = bindArgument(environment.getArgument("input"), RemoveLinkInput.class); + final RemoveLinkInput input = + bindArgument(environment.getArgument("input"), RemoveLinkInput.class); String linkUrl = input.getLinkUrl(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); if (!LinkUtils.isAuthorizedToUpdateLinks(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LinkUtils.validateAddRemoveInput( - linkUrl, - targetUrn, - _entityService - ); - try { - log.debug("Removing Link input: {}", input); - - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LinkUtils.removeLink( - linkUrl, - targetUrn, - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove link from resource with input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to remove link from resource with input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + LinkUtils.validateAddRemoveInput(linkUrl, targetUrn, _entityService); + try { + log.debug("Removing Link input: {}", input); + + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LinkUtils.removeLink(linkUrl, targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to remove link from resource with input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to remove link from resource with input %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java index 2d5faaab444589..9827aa0666d19b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveOwnerResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import 
com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveOwnerResolver implements DataFetcher> { @@ -27,36 +26,42 @@ public class RemoveOwnerResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final RemoveOwnerInput input = bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); + final RemoveOwnerInput input = + bindArgument(environment.getArgument("input"), RemoveOwnerInput.class); Urn ownerUrn = Urn.createFromString(input.getOwnerUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - Optional maybeOwnershipTypeUrn = input.getOwnershipTypeUrn() == null ? Optional.empty() - : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); + Optional maybeOwnershipTypeUrn = + input.getOwnershipTypeUrn() == null + ? Optional.empty() + : Optional.of(Urn.createFromString(input.getOwnershipTypeUrn())); if (!OwnerUtils.isAuthorizedToUpdateOwners(environment.getContext(), targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - OwnerUtils.validateRemoveInput( - targetUrn, - _entityService - ); - try { - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - OwnerUtils.removeOwnersFromResources( - ImmutableList.of(ownerUrn), - maybeOwnershipTypeUrn, - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to remove owner from resource with input {}", input); - throw new RuntimeException(String.format("Failed to remove owner from resource with input %s", input.toString()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + OwnerUtils.validateRemoveInput(targetUrn, _entityService); + try { + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + OwnerUtils.removeOwnersFromResources( + ImmutableList.of(ownerUrn), + maybeOwnershipTypeUrn, + ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), null, null)), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error("Failed to remove owner from resource with input {}", input); + throw new RuntimeException( + String.format( + "Failed to remove owner from resource with input %s", input.toString()), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java index 33a95c35760614..7e2919e0ca1f68 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTagResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTagResolver implements DataFetcher> { @@ -26,44 +25,54 @@ public class RemoveTagResolver implements DataFetcher @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { - final TagAssociationInput input = bindArgument(environment.getArgument("input"), TagAssociationInput.class); + final TagAssociationInput input = + bindArgument(environment.getArgument("input"), TagAssociationInput.class); Urn tagUrn = Urn.createFromString(input.getTagUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTags(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTags( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - tagUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.TAG_ENTITY_NAME, - _entityService, - true - ); - try { + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + tagUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.TAG_ENTITY_NAME, + _entityService, + true); + try { - if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { - log.error("Failed to remove %s. 
It is not a tag urn.", tagUrn.toString()); - return false; - } + if (!tagUrn.getEntityType().equals(Constants.TAG_ENTITY_NAME)) { + log.error("Failed to remove %s. It is not a tag urn.", tagUrn.toString()); + return false; + } - log.debug("Removing Tag. input: %s", input); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTagsFromResources( - ImmutableList.of(tagUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.debug("Removing Tag. input: %s", input); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTagsFromResources( + ImmutableList.of(tagUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java index 8f18b0ecd61989..ec38360df6d8e9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/RemoveTermResolver.java @@ -1,5 +1,7 @@ 
package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; @@ -16,9 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class RemoveTermResolver implements DataFetcher> { @@ -26,45 +25,55 @@ public class RemoveTermResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final TermAssociationInput input = bindArgument(environment.getArgument("input"), TermAssociationInput.class); + final TermAssociationInput input = + bindArgument(environment.getArgument("input"), TermAssociationInput.class); Urn termUrn = Urn.createFromString(input.getTermUrn()); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); - if (!LabelUtils.isAuthorizedToUpdateTerms(environment.getContext(), targetUrn, input.getSubResource())) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (!LabelUtils.isAuthorizedToUpdateTerms( + environment.getContext(), targetUrn, input.getSubResource())) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - LabelUtils.validateResourceAndLabel( - termUrn, - targetUrn, - input.getSubResource(), - input.getSubResourceType(), - Constants.GLOSSARY_TERM_ENTITY_NAME, - _entityService, - true - ); + return CompletableFuture.supplyAsync( + () -> { + LabelUtils.validateResourceAndLabel( + termUrn, + targetUrn, + input.getSubResource(), + input.getSubResourceType(), + Constants.GLOSSARY_TERM_ENTITY_NAME, + _entityService, + true); - try { + try { - if (!termUrn.getEntityType().equals("glossaryTerm")) { - log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); - return false; - } + if (!termUrn.getEntityType().equals("glossaryTerm")) { + log.error("Failed to remove {}. It is not a glossary term urn.", termUrn.toString()); + return false; + } - log.info(String.format("Removing Term. input: {}", input)); - Urn actor = CorpuserUrn.createFromString(((QueryContext) environment.getContext()).getActorUrn()); - LabelUtils.removeTermsFromResources( - ImmutableList.of(termUrn), - ImmutableList.of(new ResourceRefInput(input.getResourceUrn(), input.getSubResourceType(), input.getSubResource())), - actor, - _entityService - ); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + log.info(String.format("Removing Term. 
input: {}", input)); + Urn actor = + CorpuserUrn.createFromString( + ((QueryContext) environment.getContext()).getActorUrn()); + LabelUtils.removeTermsFromResources( + ImmutableList.of(termUrn), + ImmutableList.of( + new ResourceRefInput( + input.getResourceUrn(), + input.getSubResourceType(), + input.getSubResource())), + actor, + _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java index d6e6e5610da56a..13a8427633caee 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateDescriptionResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -12,19 +14,14 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; - +import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import javax.annotation.Nonnull; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - @Slf4j @RequiredArgsConstructor public class UpdateDescriptionResolver implements 
DataFetcher> { @@ -33,7 +30,8 @@ public class UpdateDescriptionResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final DescriptionUpdateInput input = bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); + final DescriptionUpdateInput input = + bindArgument(environment.getArgument("input"), DescriptionUpdateInput.class); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); log.info("Updating description. input: {}", input.toString()); switch (targetUrn.getEntityType()) { @@ -67,380 +65,383 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return updateDataProductDescription(targetUrn, input, environment.getContext()); default: throw new RuntimeException( - String.format("Failed to update description. Unsupported resource type %s provided.", targetUrn)); + String.format( + "Failed to update description. Unsupported resource type %s provided.", targetUrn)); } } - private CompletableFuture updateContainerDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - - DescriptionUtils.validateContainerInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateContainerDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateContainerDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateContainerDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + DescriptionUtils.validateContainerInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateContainerDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateDomainDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateDomainInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDomainDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateDomainDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDomainDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateDomainInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDomainDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - // If updating schema field description fails, try again on a sibling until there are no more siblings to try. Then throw if necessary. + // If updating schema field description fails, try again on a sibling until there are no more + // siblings to try. Then throw if necessary. 
private Boolean attemptUpdateDatasetSchemaFieldDescription( @Nonnull final Urn targetUrn, @Nonnull final DescriptionUpdateInput input, @Nonnull final QueryContext context, @Nonnull final HashSet attemptedUrns, - @Nonnull final List siblingUrns - ) { + @Nonnull final List siblingUrns) { attemptedUrns.add(targetUrn); try { - DescriptionUtils.validateFieldDescriptionInput(targetUrn, input.getSubResource(), input.getSubResourceType(), - _entityService); + DescriptionUtils.validateFieldDescriptionInput( + targetUrn, input.getSubResource(), input.getSubResourceType(), _entityService); final Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateFieldDescription(input.getDescription(), targetUrn, input.getSubResource(), actor, - _entityService); + DescriptionUtils.updateFieldDescription( + input.getDescription(), targetUrn, input.getSubResource(), actor, _entityService); return true; } catch (Exception e) { final Optional siblingUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, attemptedUrns); if (siblingUrn.isPresent()) { - log.warn("Failed to update description for input {}, trying sibling urn {} now.", input.toString(), siblingUrn.get()); - return attemptUpdateDatasetSchemaFieldDescription(siblingUrn.get(), input, context, attemptedUrns, siblingUrns); + log.warn( + "Failed to update description for input {}, trying sibling urn {} now.", + input.toString(), + siblingUrn.get()); + return attemptUpdateDatasetSchemaFieldDescription( + siblingUrn.get(), input, context, attemptedUrns, siblingUrns); } else { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } - private 
CompletableFuture updateDatasetSchemaFieldDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + private CompletableFuture updateDatasetSchemaFieldDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - if (!DescriptionUtils.isAuthorizedToUpdateFieldDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - if (input.getSubResourceType() == null) { - throw new IllegalArgumentException("Update description without subresource is not currently supported"); - } + if (input.getSubResourceType() == null) { + throw new IllegalArgumentException( + "Update description without subresource is not currently supported"); + } - List siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); + List siblingUrns = SiblingsUtils.getSiblingUrns(targetUrn, _entityService); - return attemptUpdateDatasetSchemaFieldDescription(targetUrn, input, context, new HashSet<>(), siblingUrns); - }); + return attemptUpdateDatasetSchemaFieldDescription( + targetUrn, input, context, new HashSet<>(), siblingUrns); + }); } - private CompletableFuture updateTagDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateTagDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateTagDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateTagDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateGlossaryTermDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryTermDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateGlossaryTermDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryTermDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateGlossaryNodeDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) - && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient) - ) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateGlossaryNodeDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateGlossaryNodeDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn) + && !GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateGlossaryNodeDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateCorpGroupDescription(Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateCorpGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateCorpGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateCorpGroupInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateCorpGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateNotebookDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateNotebookInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateNotebookDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateNotebookDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateNotebookInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateNotebookDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlModelDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlModelDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlModelGroupDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlModelGroupDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlModelGroupDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlModelGroupDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlFeatureDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlFeatureDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlPrimaryKeyDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlPrimaryKeyDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlPrimaryKeyDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlPrimaryKeyDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateMlFeatureTableDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateMlFeatureTableDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateMlFeatureTableDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateMlFeatureTableDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } - private CompletableFuture updateDataProductDescription(Urn targetUrn, DescriptionUpdateInput input, - QueryContext context) { - return CompletableFuture.supplyAsync(() -> { - - if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { - throw new AuthorizationException( - "Unauthorized to perform this action. 
Please contact your DataHub administrator."); - } - DescriptionUtils.validateLabelInput(targetUrn, _entityService); - - try { - Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - DescriptionUtils.updateDataProductDescription( - input.getDescription(), - targetUrn, - actor, - _entityService); - return true; - } catch (Exception e) { - log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); - } - }); + private CompletableFuture updateDataProductDescription( + Urn targetUrn, DescriptionUpdateInput input, QueryContext context) { + return CompletableFuture.supplyAsync( + () -> { + if (!DescriptionUtils.isAuthorizedToUpdateDescription(context, targetUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + DescriptionUtils.validateLabelInput(targetUrn, _entityService); + + try { + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + DescriptionUtils.updateDataProductDescription( + input.getDescription(), targetUrn, actor, _entityService); + return true; + } catch (Exception e) { + log.error( + "Failed to perform update against input {}, {}", input.toString(), e.getMessage()); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 0e316ac1296ee0..dd44c2718b3a43 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -1,5 +1,8 @@ package 
com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -16,22 +19,18 @@ import com.linkedin.domain.DomainProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateNameResolver implements DataFetcher> { @@ -41,178 +40,232 @@ public class UpdateNameResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final UpdateNameInput input = bindArgument(environment.getArgument("input"), UpdateNameInput.class); + final UpdateNameInput input = + bindArgument(environment.getArgument("input"), UpdateNameInput.class); Urn targetUrn = Urn.createFromString(input.getUrn()); log.info("Updating name. input: {}", input); - return CompletableFuture.supplyAsync(() -> { - if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. 
%s does not exist.", targetUrn, targetUrn)); - } + return CompletableFuture.supplyAsync( + () -> { + if (!_entityService.exists(targetUrn)) { + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + } - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermName(targetUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeName(targetUrn, input, environment.getContext()); - case Constants.DOMAIN_ENTITY_NAME: - return updateDomainName(targetUrn, input, environment.getContext()); - case Constants.CORP_GROUP_ENTITY_NAME: - return updateGroupName(targetUrn, input, environment.getContext()); - case Constants.DATA_PRODUCT_ENTITY_NAME: - return updateDataProductName(targetUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update name. Unsupported resource type %s provided.", targetUrn)); - } - }); + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermName(targetUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeName(targetUrn, input, environment.getContext()); + case Constants.DOMAIN_ENTITY_NAME: + return updateDomainName(targetUrn, input, environment.getContext()); + case Constants.CORP_GROUP_ENTITY_NAME: + return updateGroupName(targetUrn, input, environment.getContext()); + case Constants.DATA_PRODUCT_ENTITY_NAME: + return updateDataProductName(targetUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update name. 
Unsupported resource type %s provided.", targetUrn)); + } + }); } private Boolean updateGlossaryTermName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { throw new IllegalArgumentException("Glossary Term does not exist"); } glossaryTermInfo.setName(input.getName()); Urn actor = UrnUtils.getUrn(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private Boolean updateGlossaryNodeName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { final Urn parentNodeUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); if (GlossaryUtils.canManageChildrenEntities(context, parentNodeUrn, _entityClient)) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Glossary Node does not exist"); } glossaryNodeInfo.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - private Boolean updateDomainName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateDomainName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageDomains(context)) { try { - DomainProperties domainProperties = (DomainProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, _entityService, null); + DomainProperties domainProperties = + (DomainProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (domainProperties == null) { throw new IllegalArgumentException("Domain does not exist"); } - if (DomainUtils.hasNameConflict(input.getName(), DomainUtils.getParentDomainSafely(domainProperties), context, _entityClient)) { + if (DomainUtils.hasNameConflict( + input.getName(), + DomainUtils.getParentDomainSafely(domainProperties), + context, + _entityClient)) { throw new DataHubGraphQLException( - String.format("\"%s\" already exists in this domain. Please pick a unique name.", input.getName()), - DataHubGraphQLErrorCode.CONFLICT - ); + String.format( + "\"%s\" already exists in this domain. 
Please pick a unique name.", + input.getName()), + DataHubGraphQLErrorCode.CONFLICT); } domainProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DOMAIN_PROPERTIES_ASPECT_NAME, domainProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + domainProperties, + actor, + _entityService); return true; } catch (DataHubGraphQLException e) { throw e; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - private Boolean updateGroupName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + private Boolean updateGroupName(Urn targetUrn, UpdateNameInput input, QueryContext context) { if (AuthorizationUtils.canManageUsersAndGroups(context)) { try { - CorpGroupInfo corpGroupInfo = (CorpGroupInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.CORP_GROUP_INFO_ASPECT_NAME, _entityService, null); + CorpGroupInfo corpGroupInfo = + (CorpGroupInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.CORP_GROUP_INFO_ASPECT_NAME, + _entityService, + null); if (corpGroupInfo == null) { throw new IllegalArgumentException("Group does not exist"); } corpGroupInfo.setDisplayName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); + persistAspect( + targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); return 
true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private Boolean updateDataProductName( - Urn targetUrn, - UpdateNameInput input, - QueryContext context - ) { + Urn targetUrn, UpdateNameInput input, QueryContext context) { try { - DataProductProperties dataProductProperties = (DataProductProperties) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, _entityService, null); + DataProductProperties dataProductProperties = + (DataProductProperties) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + _entityService, + null); if (dataProductProperties == null) { throw new IllegalArgumentException("Data Product does not exist"); } - Domains dataProductDomains = (Domains) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); - if (dataProductDomains != null && dataProductDomains.hasDomains() && dataProductDomains.getDomains().size() > 0) { + Domains dataProductDomains = + (Domains) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), Constants.DOMAINS_ASPECT_NAME, _entityService, null); + if (dataProductDomains != null + && dataProductDomains.hasDomains() + && dataProductDomains.getDomains().size() > 0) { // get first domain since we only allow one domain right now Urn domainUrn = UrnUtils.getUrn(dataProductDomains.getDomains().get(0).toString()); - // if they can't edit a data product from either the parent domain permission or from permission on the data product itself, throw 
error + // if they can't edit a data product from either the parent domain permission or from + // permission on the data product itself, throw error if (!DataProductAuthorizationUtils.isAuthorizedToManageDataProducts(context, domainUrn) && !DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } else { // should not happen since data products need to have a domain if (!DataProductAuthorizationUtils.isAuthorizedToEditDataProduct(context, targetUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } dataProductProperties.setName(input.getName()); Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, dataProductProperties, actor, _entityService); + persistAspect( + targetUrn, + Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + dataProductProperties, + actor, + _entityService); return true; } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java index 5d78bc38eafe87..848118e6cc0f6f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateParentNodeResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; + import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -8,21 +11,17 @@ import com.linkedin.datahub.graphql.generated.UpdateParentNodeInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.client.EntityClient; -import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.entity.EntityUtils; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.concurrent.CompletableFuture; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.persistAspect; - @Slf4j @RequiredArgsConstructor public class UpdateParentNodeResolver implements DataFetcher> { @@ -32,54 +31,72 @@ public class UpdateParentNodeResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { - final UpdateParentNodeInput input = bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); + final UpdateParentNodeInput input = + bindArgument(environment.getArgument("input"), UpdateParentNodeInput.class); final QueryContext context = environment.getContext(); Urn targetUrn = Urn.createFromString(input.getResourceUrn()); 
log.info("Updating parent node. input: {}", input.toString()); if (!_entityService.exists(targetUrn)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); + throw new IllegalArgumentException( + String.format("Failed to update %s. %s does not exist.", targetUrn, targetUrn)); } GlossaryNodeUrn parentNodeUrn = null; if (input.getParentNode() != null) { parentNodeUrn = GlossaryNodeUrn.createFromString(input.getParentNode()); - if (!_entityService.exists(parentNodeUrn) || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { - throw new IllegalArgumentException(String.format("Failed to update %s. %s either does not exist or is not a glossaryNode.", targetUrn, parentNodeUrn)); + if (!_entityService.exists(parentNodeUrn) + || !parentNodeUrn.getEntityType().equals(Constants.GLOSSARY_NODE_ENTITY_NAME)) { + throw new IllegalArgumentException( + String.format( + "Failed to update %s. %s either does not exist or is not a glossaryNode.", + targetUrn, parentNodeUrn)); } } GlossaryNodeUrn finalParentNodeUrn = parentNodeUrn; - return CompletableFuture.supplyAsync(() -> { - Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); - // need to be able to manage current parent node and new parent node - if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) - && GlossaryUtils.canManageChildrenEntities(context, finalParentNodeUrn, _entityClient)) { - switch (targetUrn.getEntityType()) { - case Constants.GLOSSARY_TERM_ENTITY_NAME: - return updateGlossaryTermParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - case Constants.GLOSSARY_NODE_ENTITY_NAME: - return updateGlossaryNodeParentNode(targetUrn, finalParentNodeUrn, input, environment.getContext()); - default: - throw new RuntimeException( - String.format("Failed to update parentNode. 
Unsupported resource type %s provided.", targetUrn)); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + Urn currentParentUrn = GlossaryUtils.getParentUrn(targetUrn, context, _entityClient); + // need to be able to manage current parent node and new parent node + if (GlossaryUtils.canManageChildrenEntities(context, currentParentUrn, _entityClient) + && GlossaryUtils.canManageChildrenEntities( + context, finalParentNodeUrn, _entityClient)) { + switch (targetUrn.getEntityType()) { + case Constants.GLOSSARY_TERM_ENTITY_NAME: + return updateGlossaryTermParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + case Constants.GLOSSARY_NODE_ENTITY_NAME: + return updateGlossaryNodeParentNode( + targetUrn, finalParentNodeUrn, input, environment.getContext()); + default: + throw new RuntimeException( + String.format( + "Failed to update parentNode. Unsupported resource type %s provided.", + targetUrn)); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private Boolean updateGlossaryTermParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryTermInfo glossaryTermInfo = (GlossaryTermInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, _entityService, null); + GlossaryTermInfo glossaryTermInfo = + (GlossaryTermInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryTermInfo == null) { - // If there is no info aspect for the term already, then we should throw since the model also requires a name. 
+ // If there is no info aspect for the term already, then we should throw since the model + // also requires a name. throw new IllegalArgumentException("Info for this Glossary Term does not yet exist!"); } @@ -89,12 +106,18 @@ private Boolean updateGlossaryTermParentNode( glossaryTermInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, glossaryTermInfo, actor, _entityService); + persistAspect( + targetUrn, + Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, + glossaryTermInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } @@ -102,11 +125,15 @@ private Boolean updateGlossaryNodeParentNode( Urn targetUrn, GlossaryNodeUrn parentNodeUrn, UpdateParentNodeInput input, - QueryContext context - ) { + QueryContext context) { try { - GlossaryNodeInfo glossaryNodeInfo = (GlossaryNodeInfo) EntityUtils.getAspectFromEntity( - targetUrn.toString(), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, _entityService, null); + GlossaryNodeInfo glossaryNodeInfo = + (GlossaryNodeInfo) + EntityUtils.getAspectFromEntity( + targetUrn.toString(), + Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + _entityService, + null); if (glossaryNodeInfo == null) { throw new IllegalArgumentException("Info for this Glossary Node does not yet exist!"); } @@ -117,12 +144,18 @@ private Boolean updateGlossaryNodeParentNode( glossaryNodeInfo.removeParentNode(); } Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); - persistAspect(targetUrn, Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, glossaryNodeInfo, actor, _entityService); + persistAspect( + targetUrn, + 
Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, + glossaryNodeInfo, + actor, + _entityService); return true; } catch (Exception e) { log.error("Failed to perform update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update against input %s", input.toString()), e); + throw new RuntimeException( + String.format("Failed to perform update against input %s", input.toString()), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java index 875bc43e7c100f..53b215bce7746f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,15 +21,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} - * instead. - */ +/** Deprecated! Use {@link UpdateCorpUserViewsSettingsResolver} instead. 
*/ @Slf4j @RequiredArgsConstructor public class UpdateUserSettingResolver implements DataFetcher> { @@ -35,35 +31,46 @@ public class UpdateUserSettingResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateUserSettingInput input = bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); + final UpdateUserSettingInput input = + bindArgument(environment.getArgument("input"), UpdateUserSettingInput.class); UserSetting name = input.getName(); final boolean value = input.getValue(); final Urn actor = UrnUtils.getUrn(context.getActorUrn()); - return CompletableFuture.supplyAsync(() -> { - try { - // In the future with more settings, we'll need to do a read-modify-write - // for now though, we can just write since there is only 1 setting - CorpUserSettings newSettings = new CorpUserSettings(); - newSettings.setAppearance(new CorpUserAppearanceSettings()); - if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { - newSettings.setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); - } else { - log.error("User Setting name {} not currently supported", name); - throw new RuntimeException(String.format("User Setting name %s not currently supported", name)); - } + return CompletableFuture.supplyAsync( + () -> { + try { + // In the future with more settings, we'll need to do a read-modify-write + // for now though, we can just write since there is only 1 setting + CorpUserSettings newSettings = new CorpUserSettings(); + newSettings.setAppearance(new CorpUserAppearanceSettings()); + if (name.equals(UserSetting.SHOW_SIMPLIFIED_HOMEPAGE)) { + newSettings.setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(value)); + } else { + log.error("User Setting name {} not currently supported", name); + throw new RuntimeException( + String.format("User Setting name %s not currently supported", name)); + } - 
MetadataChangeProposal proposal = - buildMetadataChangeProposalWithUrn(actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings); - _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); + _entityService.ingestProposal(proposal, EntityUtils.getAuditStamp(actor), false); - return true; - } catch (Exception e) { - log.error("Failed to perform user settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform user settings update against input %s", input.toString()), e); - } - }); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform user settings update against input %s", input.toString()), + e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 7d4c5bee61e19b..3fffe9fa019e7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Status; import com.linkedin.common.urn.Urn; import 
com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; @@ -19,22 +20,21 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class DeleteUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeleteUtils() { } + private DeleteUtils() {} public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.DELETE_ENTITY_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -45,11 +45,7 @@ public static boolean isAuthorizedToDeleteEntity(@Nonnull QueryContext context, } public static void updateStatusForResources( - boolean removed, - List urnStrs, - Urn actor, - EntityService entityService - ) { + boolean removed, List urnStrs, Urn actor, EntityService entityService) { final List changes = new 
ArrayList<>(); for (String urnStr : urnStrs) { changes.add(buildSoftDeleteProposal(removed, urnStr, actor, entityService)); @@ -58,17 +54,13 @@ public static void updateStatusForResources( } private static MetadataChangeProposal buildSoftDeleteProposal( - boolean removed, - String urnStr, - Urn actor, - EntityService entityService - ) { - Status status = (Status) EntityUtils.getAspectFromEntity( - urnStr, - Constants.STATUS_ASPECT_NAME, - entityService, - new Status()); + boolean removed, String urnStr, Urn actor, EntityService entityService) { + Status status = + (Status) + EntityUtils.getAspectFromEntity( + urnStr, Constants.STATUS_ASPECT_NAME, entityService, new Status()); status.setRemoved(removed); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urnStr), Constants.STATUS_ASPECT_NAME, status); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java index bd82bbb8e514f3..3114e5241711c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java @@ -1,14 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.metadata.Constants; @@ -22,22 +23,22 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.aspect.utils.DeprecationUtils.*; - - @Slf4j public class DeprecationUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DeprecationUtils() { } + private DeprecationUtils() {} - public static boolean isAuthorizedToUpdateDeprecationForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDeprecationForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DEPRECATION_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -53,11 +54,12 @@ public static void updateDeprecationForResources( @Nullable Long decommissionTime, List resources, Urn actor, - EntityService entityService - ) { + EntityService 
entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildUpdateDeprecationProposal(deprecated, note, decommissionTime, resource, actor, entityService)); + changes.add( + buildUpdateDeprecationProposal( + deprecated, note, decommissionTime, resource, actor, entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } @@ -68,21 +70,11 @@ private static MetadataChangeProposal buildUpdateDeprecationProposal( @Nullable Long decommissionTime, ResourceRefInput resource, Urn actor, - EntityService entityService - ) { + EntityService entityService) { String resourceUrn = resource.getResourceUrn(); - Deprecation deprecation = getDeprecation( - entityService, - resourceUrn, - actor, - note, - deprecated, - decommissionTime - ); + Deprecation deprecation = + getDeprecation(entityService, resourceUrn, actor, note, deprecated, decommissionTime); return MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn(resourceUrn), - Constants.DEPRECATION_ASPECT_NAME, - deprecation - ); + UrnUtils.getUrn(resourceUrn), Constants.DEPRECATION_ASPECT_NAME, deprecation); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index 585fbdf53a2ba4..fb88d6c29f6621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -1,15 +1,17 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; +import 
com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; @@ -30,7 +32,6 @@ import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; - import com.linkedin.r2.RemoteInvocationException; import java.util.ArrayList; import java.util.Collections; @@ -40,13 +41,8 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; - import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - // TODO: Move to consuming from DomainService. 
@Slf4j public class DomainUtils { @@ -54,17 +50,20 @@ public class DomainUtils { private static final String HAS_PARENT_DOMAIN_INDEX_FIELD_NAME = "hasParentDomain"; private static final String NAME_INDEX_FIELD_NAME = "name"; - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DomainUtils() { } + private DomainUtils() {} - public static boolean isAuthorizedToUpdateDomainsForEntity(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateDomainsForEntity( + @Nonnull QueryContext context, Urn entityUrn) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOMAINS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -78,8 +77,8 @@ public static void setDomainForResources( @Nullable Urn domainUrn, List resources, Urn actor, - EntityService entityService - ) throws Exception { + EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildSetDomainProposal(domainUrn, resource, actor, entityService)); @@ -88,27 +87,27 @@ public static void setDomainForResources( } private static MetadataChangeProposal buildSetDomainProposal( - @Nullable Urn domainUrn, - ResourceRefInput resource, - Urn actor, - EntityService 
entityService - ) { - Domains domains = (Domains) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.DOMAINS_ASPECT_NAME, - entityService, - new Domains()); + @Nullable Urn domainUrn, ResourceRefInput resource, Urn actor, EntityService entityService) { + Domains domains = + (Domains) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.DOMAINS_ASPECT_NAME, + entityService, + new Domains()); final UrnArray newDomains = new UrnArray(); if (domainUrn != null) { newDomains.add(domainUrn); } domains.setDomains(newDomains); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.DOMAINS_ASPECT_NAME, domains); } public static void validateDomain(Urn domainUrn, EntityService entityService) { if (!entityService.exists(domainUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate Domain with urn %s. Urn does not exist.", domainUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate Domain with urn %s. 
Urn does not exist.", domainUrn)); } } @@ -119,14 +118,12 @@ private static List buildRootDomainCriteria() { new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("false") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("") - .setCondition(Condition.IS_NULL) - ); + .setCondition(Condition.IS_NULL)); return criteria; } @@ -138,14 +135,12 @@ private static List buildParentDomainCriteria(@Nonnull final Urn pare new Criterion() .setField(HAS_PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue("true") - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); criteria.add( new Criterion() .setField(PARENT_DOMAIN_INDEX_FIELD_NAME) .setValue(parentDomainUrn.toString()) - .setCondition(Condition.EQUAL) - ); + .setCondition(Condition.EQUAL)); return criteria; } @@ -158,36 +153,38 @@ private static Criterion buildNameCriterion(@Nonnull final String name) { } /** - * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain criterion. - * The reason for the OR on root is elastic can have a null|false value to represent an root domain in the index. + * Builds a filter that ORs together the root parent criterion / ANDs together the parent domain + * criterion. The reason for the OR on root is elastic can have a null|false value to represent an + * root domain in the index. + * * @param name an optional name to AND in to each condition of the filter * @param parentDomainUrn the parent domain (null means root). 
* @return the Filter */ - public static Filter buildNameAndParentDomainFilter(@Nullable final String name, @Nullable final Urn parentDomainUrn) { + public static Filter buildNameAndParentDomainFilter( + @Nullable final String name, @Nullable final Urn parentDomainUrn) { if (parentDomainUrn == null) { - return new Filter().setOr( - new ConjunctiveCriterionArray( - buildRootDomainCriteria().stream().map(parentCriterion -> { - final CriterionArray array = new CriterionArray(parentCriterion); - if (name != null) { - array.add(buildNameCriterion(name)); - } - return new ConjunctiveCriterion().setAnd(array); - }).collect(Collectors.toList()) - ) - ); + return new Filter() + .setOr( + new ConjunctiveCriterionArray( + buildRootDomainCriteria().stream() + .map( + parentCriterion -> { + final CriterionArray array = new CriterionArray(parentCriterion); + if (name != null) { + array.add(buildNameCriterion(name)); + } + return new ConjunctiveCriterion().setAnd(array); + }) + .collect(Collectors.toList()))); } final CriterionArray andArray = new CriterionArray(buildParentDomainCriteria(parentDomainUrn)); if (name != null) { andArray.add(buildNameCriterion(name)); } - return new Filter().setOr( - new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(andArray) - ) - ); + return new Filter() + .setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(andArray))); } public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn) { @@ -196,6 +193,7 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn /** * Check if a domain has any child domains + * * @param domainUrn the URN of the domain to check * @param context query context (includes authorization context to authorize the request) * @param entityClient client used to perform the check @@ -204,18 +202,14 @@ public static Filter buildParentDomainFilter(@Nullable final Urn parentDomainUrn public static boolean hasChildDomains( @Nonnull final Urn domainUrn, 
@Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) throws RemoteInvocationException { + @Nonnull final EntityClient entityClient) + throws RemoteInvocationException { Filter parentDomainFilter = buildParentDomainFilter(domainUrn); // Search for entities matching parent domain // Limit count to 1 for existence check - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - parentDomainFilter, - null, - 0, - 1, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, parentDomainFilter, null, 0, 1, context.getAuthentication()); return (searchResult.getNumEntities() > 0); } @@ -223,23 +217,18 @@ private static Map getDomainsByNameAndParent( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { final Filter filter = buildNameAndParentDomainFilter(name, parentDomainUrn); - final SearchResult searchResult = entityClient.filter( - DOMAIN_ENTITY_NAME, - filter, - null, - 0, - 1000, - context.getAuthentication()); + final SearchResult searchResult = + entityClient.filter( + DOMAIN_ENTITY_NAME, filter, null, 0, 1000, context.getAuthentication()); - final Set domainUrns = searchResult.getEntities() - .stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toSet()); + final Set domainUrns = + searchResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toSet()); return entityClient.batchGetV2( DOMAIN_ENTITY_NAME, @@ -255,51 +244,63 @@ public static boolean hasNameConflict( @Nonnull final String name, @Nullable final Urn parentDomainUrn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { - final Map entities = getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); + @Nonnull final EntityClient entityClient) { + final Map entities = + 
getDomainsByNameAndParent(name, parentDomainUrn, context, entityClient); // Even though we searched by name, do one more pass to check the name is unique - return entities.values().stream().anyMatch(entityResponse -> { - if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - DataMap dataMap = entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data(); - DomainProperties domainProperties = new DomainProperties(dataMap); - return (domainProperties.hasName() && domainProperties.getName().equals(name)); - } - return false; - }); + return entities.values().stream() + .anyMatch( + entityResponse -> { + if (entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + DataMap dataMap = + entityResponse + .getAspects() + .get(DOMAIN_PROPERTIES_ASPECT_NAME) + .getValue() + .data(); + DomainProperties domainProperties = new DomainProperties(dataMap); + return (domainProperties.hasName() && domainProperties.getName().equals(name)); + } + return false; + }); } @Nullable public static Entity getParentDomain( @Nonnull final Urn urn, @Nonnull final QueryContext context, - @Nonnull final EntityClient entityClient - ) { + @Nonnull final EntityClient entityClient) { try { - final EntityResponse entityResponse = entityClient.getV2( - DOMAIN_ENTITY_NAME, - urn, - Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), - context.getAuthentication() - ); - - if (entityResponse != null && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { - final DomainProperties properties = new DomainProperties(entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); + final EntityResponse entityResponse = + entityClient.getV2( + DOMAIN_ENTITY_NAME, + urn, + Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME), + context.getAuthentication()); + + if (entityResponse != null + && entityResponse.getAspects().containsKey(DOMAIN_PROPERTIES_ASPECT_NAME)) { + final DomainProperties properties = + new 
DomainProperties( + entityResponse.getAspects().get(DOMAIN_PROPERTIES_ASPECT_NAME).getValue().data()); final Urn parentDomainUrn = getParentDomainSafely(properties); return parentDomainUrn != null ? UrnToEntityMapper.map(parentDomainUrn) : null; } } catch (Exception e) { - throw new RuntimeException(String.format("Failed to retrieve parent domain for entity %s", urn), e); + throw new RuntimeException( + String.format("Failed to retrieve parent domain for entity %s", urn), e); } return null; } /** - * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where moving a domain - * to the root leaves the parentDomain field set but makes hasParentDomain false. This helper makes sure that queries - * to elastic where hasParentDomain=false and parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * Get a parent domain only if hasParentDomain was set. There is strange elastic behavior where + * moving a domain to the root leaves the parentDomain field set but makes hasParentDomain false. + * This helper makes sure that queries to elastic where hasParentDomain=false and + * parentDomain=value only gives us the parentDomain if hasParentDomain=true. + * * @param properties the domain properties aspect * @return the parentDomain or null */ @@ -307,4 +308,4 @@ public static Entity getParentDomain( public static Urn getParentDomainSafely(@Nonnull final DomainProperties properties) { return properties.hasParentDomain() ? 
properties.getParentDomain() : null; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java index 8aa4a8d756bea6..15c93904fc3bdd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/EmbedUtils.java @@ -3,7 +3,6 @@ import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -11,20 +10,22 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - @Slf4j public class EmbedUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private EmbedUtils() { } + private EmbedUtils() {} - public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())) - )); + public static boolean isAuthorizedToUpdateEmbedForEntity( + @Nonnull final Urn entityUrn, @Nonnull final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( 
+ ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_EMBED_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -33,4 +34,4 @@ public static boolean isAuthorizedToUpdateEmbedForEntity(@Nonnull final Urn enti entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java index 655e5333cb34e1..996bd3da120d6f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/GlossaryUtils.java @@ -1,12 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; @@ -15,32 +15,36 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.authorization.PoliciesConfig.Privilege; import com.linkedin.r2.RemoteInvocationException; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.net.URISyntaxException; import java.util.Optional; +import javax.annotation.Nonnull; 
+import javax.annotation.Nullable; +import lombok.extern.slf4j.Slf4j; @Slf4j public class GlossaryUtils { - private GlossaryUtils() { } + private GlossaryUtils() {} /** - * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the user has global control - * of their Business Glossary to create, edit, move, and delete Terms and Nodes. + * Checks the Platform Privilege MANAGE_GLOSSARIES to see if a user is authorized. If true, the + * user has global control of their Business Glossary to create, edit, move, and delete Terms and + * Nodes. */ public static boolean canManageGlossaries(@Nonnull QueryContext context) { - return AuthorizationUtils.isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); + return AuthorizationUtils.isAuthorized( + context, Optional.empty(), PoliciesConfig.MANAGE_GLOSSARIES_PRIVILEGE); } /** - * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes under a parent Node. - * They can do this with either the global MANAGE_GLOSSARIES privilege, or if they have the MANAGE_GLOSSARY_CHILDREN privilege - * on the relevant parent node in the Glossary. + * Returns true if the current user is able to create, delete, or move Glossary Terms and Nodes + * under a parent Node. They can do this with either the global MANAGE_GLOSSARIES privilege, or if + * they have the MANAGE_GLOSSARY_CHILDREN privilege on the relevant parent node in the Glossary. 
*/ - public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, @Nonnull EntityClient entityClient) { + public static boolean canManageChildrenEntities( + @Nonnull QueryContext context, + @Nullable Urn parentNodeUrn, + @Nonnull EntityClient entityClient) { if (canManageGlossaries(context)) { return true; } @@ -48,28 +52,31 @@ public static boolean canManageChildrenEntities(@Nonnull QueryContext context, @ return false; // if no parent node, we must rely on the canManageGlossaries method above } - //Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege - if (hasManagePrivilege(context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { + // Check for the MANAGE_GLOSSARY_CHILDREN_PRIVILEGE privilege + if (hasManagePrivilege( + context, parentNodeUrn, PoliciesConfig.MANAGE_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } - //Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no parent associated. + // Check for the MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE privilege recursively until there is no + // parent associated. 
Urn currentParentNodeUrn = parentNodeUrn; while (currentParentNodeUrn != null) { - if (hasManagePrivilege(context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { + if (hasManagePrivilege( + context, currentParentNodeUrn, PoliciesConfig.MANAGE_ALL_GLOSSARY_CHILDREN_PRIVILEGE)) { return true; } currentParentNodeUrn = getParentUrn(currentParentNodeUrn, context, entityClient); } return false; - } - public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())) - )); + public static boolean hasManagePrivilege( + @Nonnull QueryContext context, @Nullable Urn parentNodeUrn, Privilege privilege) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of(new ConjunctivePrivilegeGroup(ImmutableList.of(privilege.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -83,13 +90,24 @@ public static boolean hasManagePrivilege(@Nonnull QueryContext context, @Nullabl * Returns the urn of the parent node for a given Glossary Term. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getTermParentUrn( + @Nonnull Urn termUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_TERM_ENTITY_NAME, termUrn, - ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { - GlossaryTermInfo termInfo = new GlossaryTermInfo(response.getAspects() - .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_TERM_ENTITY_NAME, + termUrn, + ImmutableSet.of(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) != null) { + GlossaryTermInfo termInfo = + new GlossaryTermInfo( + response + .getAspects() + .get(Constants.GLOSSARY_TERM_INFO_ASPECT_NAME) + .getValue() + .data()); return termInfo.getParentNode(); } return null; @@ -102,13 +120,24 @@ private static Urn getTermParentUrn(@Nonnull Urn termUrn, @Nonnull QueryContext * Returns the urn of the parent node for a given Glossary Node. Returns null if it doesn't exist. 
*/ @Nullable - private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + private static Urn getNodeParentUrn( + @Nonnull Urn nodeUrn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { try { - EntityResponse response = entityClient.getV2(Constants.GLOSSARY_NODE_ENTITY_NAME, nodeUrn, - ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), context.getAuthentication()); - if (response != null && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { - GlossaryNodeInfo nodeInfo = new GlossaryNodeInfo(response.getAspects() - .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME).getValue().data()); + EntityResponse response = + entityClient.getV2( + Constants.GLOSSARY_NODE_ENTITY_NAME, + nodeUrn, + ImmutableSet.of(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME), + context.getAuthentication()); + if (response != null + && response.getAspects().get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) != null) { + GlossaryNodeInfo nodeInfo = + new GlossaryNodeInfo( + response + .getAspects() + .get(Constants.GLOSSARY_NODE_INFO_ASPECT_NAME) + .getValue() + .data()); return nodeInfo.getParentNode(); } return null; @@ -118,17 +147,21 @@ private static Urn getNodeParentUrn(@Nonnull Urn nodeUrn, @Nonnull QueryContext } /** - * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null otherwise. + * Gets the urn of a Term or Node parent Node. Returns the urn if it exists. Returns null + * otherwise. 
*/ @Nullable - public static Urn getParentUrn(@Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { + public static Urn getParentUrn( + @Nonnull Urn urn, @Nonnull QueryContext context, @Nonnull EntityClient entityClient) { switch (urn.getEntityType()) { case Constants.GLOSSARY_TERM_ENTITY_NAME: return getTermParentUrn(urn, context, entityClient); case Constants.GLOSSARY_NODE_ENTITY_NAME: return getNodeParentUrn(urn, context, entityClient); default: - log.warn("Tried to get the parent node urn of a non-glossary entity type: {}", urn.getEntityType()); + log.warn( + "Tried to get the parent node urn of a non-glossary entity type: {}", + urn.getEntityType()); return null; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index a93c7d5b333da1..8765b91f65d9d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTermAssociation; @@ -13,8 +17,6 @@ import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.generated.ResourceRefInput; import 
com.linkedin.datahub.graphql.generated.SubResourceType; import com.linkedin.metadata.Constants; @@ -30,53 +32,56 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming GlossaryTermService, TagService. @Slf4j public class LabelUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LabelUtils() { } + private LabelUtils() {} public static void removeTermFromResource( - Urn labelUrn, - Urn resourceUrn, - String subResource, - Urn actor, - EntityService entityService - ) { + Urn labelUrn, Urn resourceUrn, String subResource, Urn actor, EntityService entityService) { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermIfExists(terms, labelUrn); persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = 
getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermIfExists(editableFieldInfo.getGlossaryTerms(), labelUrn); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } public static void removeTagsFromResources( - List tags, - List resources, - Urn actor, - EntityService entityService - ) throws Exception { + List tags, List resources, Urn actor, EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildRemoveTagsProposal(tags, resource, actor, entityService)); @@ -85,11 +90,8 @@ public static void removeTagsFromResources( } public static void addTagsToResources( - List tagUrns, - List resources, - Urn actor, - EntityService entityService - ) throws Exception { + List tagUrns, List resources, Urn actor, EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildAddTagsProposal(tagUrns, resource, actor, entityService)); @@ -98,11 +100,8 @@ public static void addTagsToResources( } public static void removeTermsFromResources( - List termUrns, - List resources, - Urn actor, - EntityService entityService - ) throws Exception { + List termUrns, List resources, Urn actor, EntityService entityService) + throws Exception { final List changes = new 
ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildRemoveTermsProposal(termUrns, resource, actor, entityService)); @@ -111,11 +110,8 @@ public static void removeTermsFromResources( } public static void addTermsToResources( - List termUrns, - List resources, - Urn actor, - EntityService entityService - ) throws Exception { + List termUrns, List resources, Urn actor, EntityService entityService) + throws Exception { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { changes.add(buildAddTermsProposal(termUrns, resource, actor, entityService)); @@ -128,12 +124,16 @@ public static void addTermsToResource( Urn resourceUrn, String subResource, Urn actor, - EntityService entityService - ) throws URISyntaxException { + EntityService entityService) + throws URISyntaxException { if (subResource == null || subResource.equals("")) { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resourceUrn.toString(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -144,10 +144,15 @@ public static void addTermsToResource( persistAspect(resourceUrn, Constants.GLOSSARY_TERMS_ASPECT_NAME, terms, actor, entityService); } else { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, subResource); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + 
resourceUrn.toString(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, subResource); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -155,7 +160,12 @@ public static void addTermsToResource( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), labelUrns); - persistAspect(resourceUrn, Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata, actor, entityService); + persistAspect( + resourceUrn, + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata, + actor, + entityService); } } @@ -181,17 +191,22 @@ private static GlossaryTermAssociationArray removeTermIfExists(GlossaryTerms ter return termArray; } - public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTags( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -201,19 +216,23 @@ public static boolean isAuthorizedToUpdateTags(@Nonnull QueryContext context, Ur orPrivilegeGroups); } - public static boolean isAuthorizedToUpdateTerms(@Nonnull QueryContext context, Urn targetUrn, String subResource) { + public static boolean isAuthorizedToUpdateTerms( + @Nonnull QueryContext context, Urn targetUrn, String subResource) { Boolean isTargetingSchema = subResource != null && subResource.length() > 0; // Decide whether the current principal should be allowed to update the Dataset. - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(isTargetingSchema - ? PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() - : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType() - )) - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of( + isTargetingSchema + ? 
PoliciesConfig.EDIT_DATASET_COL_GLOSSARY_TERMS_PRIVILEGE.getType() + : PoliciesConfig.EDIT_ENTITY_GLOSSARY_TERMS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -230,37 +249,56 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { for (Urn urn : labelUrns) { - validateResourceAndLabel(urn, resourceUrn, subResource, subResourceType, labelEntityType, entityService, isRemoving); + validateResourceAndLabel( + urn, + resourceUrn, + subResource, + subResourceType, + labelEntityType, + entityService, + isRemoving); } } - public static void validateLabel(Urn labelUrn, String labelEntityType, EntityService entityService) { + public static void validateLabel( + Urn labelUrn, String labelEntityType, EntityService entityService) { if (!labelUrn.getEntityType().equals(labelEntityType)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn type does not match entity type %s..", - labelUrn, - labelEntityType)); + throw new IllegalArgumentException( + String.format( + "Failed to validate label with urn %s. Urn type does not match entity type %s..", + labelUrn, labelEntityType)); } if (!entityService.exists(labelUrn)) { - throw new IllegalArgumentException(String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); + throw new IllegalArgumentException( + String.format("Failed to validate label with urn %s. Urn does not exist.", labelUrn)); } } // TODO: Move this out into a separate utilities class. 
- public static void validateResource(Urn resourceUrn, String subResource, SubResourceType subResourceType, EntityService entityService) { + public static void validateResource( + Urn resourceUrn, + String subResource, + SubResourceType subResourceType, + EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. Entity does not exist.", resourceUrn)); } if ((subResource != null && subResource.length() > 0) || subResourceType != null) { if (subResource == null || subResource.length() == 0) { - throw new IllegalArgumentException(String.format( - "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", resourceUrn, subResourceType)); + throw new IllegalArgumentException( + String.format( + "Failed to update resource with urn %s. SubResourceType (%s) provided without a subResource.", + resourceUrn, subResourceType)); } if (subResourceType == null) { - throw new IllegalArgumentException(String.format( - "Failed to updates resource with urn %s. SubResource (%s) provided without a subResourceType.", resourceUrn, subResource)); + throw new IllegalArgumentException( + String.format( + "Failed to updates resource with urn %s. 
SubResource (%s) provided without a subResourceType.", + resourceUrn, subResource)); } validateSubresourceExists(resourceUrn, subResource, subResourceType, entityService); } @@ -273,8 +311,7 @@ public static void validateResourceAndLabel( SubResourceType subResourceType, String labelEntityType, EntityService entityService, - Boolean isRemoving - ) { + Boolean isRemoving) { if (!isRemoving) { validateLabel(labelUrn, labelEntityType, entityService); } @@ -282,11 +319,8 @@ public static void validateResourceAndLabel( } private static MetadataChangeProposal buildAddTagsProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildAddTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -297,11 +331,8 @@ private static MetadataChangeProposal buildAddTagsProposal( } private static MetadataChangeProposal buildRemoveTagsProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding tags to a top-level entity return buildRemoveTagsToEntityProposal(tagUrns, resource, actor, entityService); @@ -312,82 +343,90 @@ private static MetadataChangeProposal buildRemoveTagsProposal( } private static MetadataChangeProposal buildRemoveTagsToEntityProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlobalTags tags = - 
(com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } removeTagsIfExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildRemoveTagsToSubResourceProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - entityService, - new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } removeTagsIfExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, 
editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildAddTagsToEntityProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlobalTags tags = - (com.linkedin.common.GlobalTags) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOBAL_TAGS_ASPECT_NAME, - entityService, new GlobalTags()); + (com.linkedin.common.GlobalTags) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOBAL_TAGS_ASPECT_NAME, + entityService, + new GlobalTags()); if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } addTagsIfNotExists(tags, tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOBAL_TAGS_ASPECT_NAME, tags); } private static MetadataChangeProposal buildAddTagsToSubResourceProposal( - List tagUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List tagUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + 
EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlobalTags()) { editableFieldInfo.setGlobalTags(new GlobalTags()); } addTagsIfNotExists(editableFieldInfo.getGlobalTags(), tagUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } - private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throws URISyntaxException { + private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) + throws URISyntaxException { if (!tags.hasTags()) { tags.setTags(new TagAssociationArray()); } @@ -396,7 +435,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throw List tagsToAdd = new ArrayList<>(); for (Urn tagUrn : tagUrns) { - if (tagAssociationArray.stream().anyMatch(association -> association.getTag().equals(tagUrn))) { + if (tagAssociationArray.stream() + .anyMatch(association -> association.getTag().equals(tagUrn))) { continue; } tagsToAdd.add(tagUrn); @@ -415,11 +455,8 @@ private static void addTagsIfNotExists(GlobalTags tags, List tagUrns) throw } private static MetadataChangeProposal buildAddTermsProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Adding terms to a top-level entity return 
buildAddTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -430,11 +467,8 @@ private static MetadataChangeProposal buildAddTermsProposal( } private static MetadataChangeProposal buildRemoveTermsProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { if (resource.getSubResource() == null || resource.getSubResource().equals("")) { // Case 1: Removing terms from a top-level entity return buildRemoveTermsToEntityProposal(termUrns, resource, actor, entityService); @@ -445,14 +479,15 @@ private static MetadataChangeProposal buildRemoveTermsProposal( } private static MetadataChangeProposal buildAddTermsToEntityProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.common.GlossaryTerms terms = - (com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity(resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, - entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); if (!terms.hasTerms()) { @@ -460,20 +495,23 @@ private static MetadataChangeProposal buildAddTermsToEntityProposal( } addTermsIfNotExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal 
buildAddTermsToSubResourceProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) throws URISyntaxException { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) + throws URISyntaxException { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } @@ -481,42 +519,48 @@ private static MetadataChangeProposal buildAddTermsToSubResourceProposal( editableFieldInfo.getGlossaryTerms().setAuditStamp(EntityUtils.getAuditStamp(actor)); addTermsIfNotExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static MetadataChangeProposal buildRemoveTermsToEntityProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.common.GlossaryTerms terms = - 
(com.linkedin.common.GlossaryTerms) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.GLOSSARY_TERMS_ASPECT_NAME, entityService, new GlossaryTerms()); + (com.linkedin.common.GlossaryTerms) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.GLOSSARY_TERMS_ASPECT_NAME, + entityService, + new GlossaryTerms()); terms.setAuditStamp(EntityUtils.getAuditStamp(actor)); removeTermsIfExists(terms, termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), Constants.GLOSSARY_TERMS_ASPECT_NAME, terms); } private static MetadataChangeProposal buildRemoveTermsToSubResourceProposal( - List termUrns, - ResourceRefInput resource, - Urn actor, - EntityService entityService - ) { + List termUrns, ResourceRefInput resource, Urn actor, EntityService entityService) { com.linkedin.schema.EditableSchemaMetadata editableSchemaMetadata = - (com.linkedin.schema.EditableSchemaMetadata) EntityUtils.getAspectFromEntity( - resource.getResourceUrn(), Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, entityService, new EditableSchemaMetadata()); - EditableSchemaFieldInfo editableFieldInfo = getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); + (com.linkedin.schema.EditableSchemaMetadata) + EntityUtils.getAspectFromEntity( + resource.getResourceUrn(), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + entityService, + new EditableSchemaMetadata()); + EditableSchemaFieldInfo editableFieldInfo = + getFieldInfoFromSchema(editableSchemaMetadata, resource.getSubResource()); if (!editableFieldInfo.hasGlossaryTerms()) { editableFieldInfo.setGlossaryTerms(new GlossaryTerms()); } removeTermsIfExists(editableFieldInfo.getGlossaryTerms(), termUrns); - return buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(resource.getResourceUrn()), - 
Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, editableSchemaMetadata); + return buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(resource.getResourceUrn()), + Constants.EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + editableSchemaMetadata); } private static void addTermsIfNotExists(GlossaryTerms terms, List termUrns) @@ -547,7 +591,8 @@ private static void addTermsIfNotExists(GlossaryTerms terms, List termUrns) } } - private static GlossaryTermAssociationArray removeTermsIfExists(GlossaryTerms terms, List termUrns) { + private static GlossaryTermAssociationArray removeTermsIfExists( + GlossaryTerms terms, List termUrns) { if (!terms.hasTerms()) { terms.setTerms(new GlossaryTermAssociationArray()); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java index 9ec0f9b8e6070d..b93c72edbcfc51 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LinkUtils.java @@ -1,7 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; -import com.google.common.collect.ImmutableList; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; +import com.google.common.collect.ImmutableList; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; @@ -9,59 +12,59 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import 
com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.entity.EntityService; -import javax.annotation.Nonnull; - import com.linkedin.metadata.entity.EntityUtils; +import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - @Slf4j public class LinkUtils { - private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private LinkUtils() { } + private LinkUtils() {} public static void addLink( - String linkUrl, - String linkLabel, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, String linkLabel, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); addLink(institutionalMemoryAspect, linkUrl, linkLabel, actor); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } public static void removeLink( - String linkUrl, - Urn resourceUrn, - Urn actor, - EntityService entityService - ) { - 
InstitutionalMemory institutionalMemoryAspect = (InstitutionalMemory) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - entityService, - new InstitutionalMemory()); + String linkUrl, Urn resourceUrn, Urn actor, EntityService entityService) { + InstitutionalMemory institutionalMemoryAspect = + (InstitutionalMemory) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + entityService, + new InstitutionalMemory()); removeLink(institutionalMemoryAspect, linkUrl); - persistAspect(resourceUrn, Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, institutionalMemoryAspect, actor, entityService); + persistAspect( + resourceUrn, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + institutionalMemoryAspect, + actor, + entityService); } - private static void addLink(InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { + private static void addLink( + InstitutionalMemory institutionalMemoryAspect, String linkUrl, String linkLabel, Urn actor) { if (!institutionalMemoryAspect.hasElements()) { institutionalMemoryAspect.setElements(new InstitutionalMemoryMetadataArray()); } @@ -90,10 +93,12 @@ private static void removeLink(InstitutionalMemory institutionalMemoryAspect, St } public static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -104,21 +109,22 @@ public 
static boolean isAuthorizedToUpdateLinks(@Nonnull QueryContext context, U } public static Boolean validateAddRemoveInput( - String linkUrl, - Urn resourceUrn, - EntityService entityService - ) { + String linkUrl, Urn resourceUrn, EntityService entityService) { try { new Url(linkUrl); } catch (Exception e) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Expected a corp group urn.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Expected a corp group urn.", + resourceUrn)); } if (!entityService.exists(resourceUrn)) { throw new IllegalArgumentException( - String.format("Failed to change institutional memory for resource %s. Resource does not exist.", resourceUrn)); + String.format( + "Failed to change institutional memory for resource %s. Resource does not exist.", + resourceUrn)); } return true; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 72339958044231..15c3c14c7b8f67 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.ConjunctivePrivilegeGroup; import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; @@ -28,104 +30,124 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - // TODO: Move to consuming from OwnerService @Slf4j public class OwnerUtils { - private static final 
ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private static final ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); public static final String SYSTEM_ID = "__system__"; - private OwnerUtils() { } + private OwnerUtils() {} public static void addOwnersToResources( List owners, List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildAddOwnersProposal(owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); + changes.add( + buildAddOwnersProposal( + owners, UrnUtils.getUrn(resource.getResourceUrn()), entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } public static void removeOwnersFromResources( - List ownerUrns, Optional maybeOwnershipTypeUrn, List resources, + List ownerUrns, + Optional maybeOwnershipTypeUrn, + List resources, Urn actor, - EntityService entityService - ) { + EntityService entityService) { final List changes = new ArrayList<>(); for (ResourceRefInput resource : resources) { - changes.add(buildRemoveOwnersProposal(ownerUrns, maybeOwnershipTypeUrn, UrnUtils.getUrn(resource.getResourceUrn()), - actor, entityService)); + changes.add( + buildRemoveOwnersProposal( + ownerUrns, + maybeOwnershipTypeUrn, + UrnUtils.getUrn(resource.getResourceUrn()), + actor, + entityService)); } EntityUtils.ingestChangeProposals(changes, entityService, actor, false); } - - static MetadataChangeProposal buildAddOwnersProposal(List owners, Urn resourceUrn, EntityService entityService) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, entityService, - new Ownership()); + static 
MetadataChangeProposal buildAddOwnersProposal( + List owners, Urn resourceUrn, EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); for (OwnerInput input : owners) { - addOwner(ownershipAspect, UrnUtils.getUrn(input.getOwnerUrn()), input.getType(), UrnUtils.getUrn(input.getOwnershipTypeUrn())); + addOwner( + ownershipAspect, + UrnUtils.getUrn(input.getOwnerUrn()), + input.getType(), + UrnUtils.getUrn(input.getOwnershipTypeUrn())); } - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } public static MetadataChangeProposal buildRemoveOwnersProposal( - List ownerUrns, Optional maybeOwnershipTypeUrn, Urn resourceUrn, + List ownerUrns, + Optional maybeOwnershipTypeUrn, + Urn resourceUrn, Urn actor, - EntityService entityService - ) { - Ownership ownershipAspect = (Ownership) EntityUtils.getAspectFromEntity( - resourceUrn.toString(), - Constants.OWNERSHIP_ASPECT_NAME, - entityService, - new Ownership()); + EntityService entityService) { + Ownership ownershipAspect = + (Ownership) + EntityUtils.getAspectFromEntity( + resourceUrn.toString(), + Constants.OWNERSHIP_ASPECT_NAME, + entityService, + new Ownership()); ownershipAspect.setLastModified(EntityUtils.getAuditStamp(actor)); removeOwnersIfExists(ownershipAspect, ownerUrns, maybeOwnershipTypeUrn); - return buildMetadataChangeProposalWithUrn(resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); + return buildMetadataChangeProposalWithUrn( + resourceUrn, Constants.OWNERSHIP_ASPECT_NAME, ownershipAspect); } - private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipType type, Urn ownershipUrn) { + private static void addOwner( + Ownership ownershipAspect, Urn ownerUrn, 
OwnershipType type, Urn ownershipUrn) { if (!ownershipAspect.hasOwners()) { ownershipAspect.setOwners(new OwnerArray()); } final OwnerArray ownerArray = new OwnerArray(ownershipAspect.getOwners()); - ownerArray.removeIf(owner -> { - // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) + ownerArray.removeIf( + owner -> { + // Remove old ownership if it exists (check ownerUrn + type (entity & deprecated type)) - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(ownershipUrn); - } + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(ownershipUrn); + } - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(ownershipUrn.toString()); - }); + // Fall back to mapping deprecated type to the new ownership entity, if it matches remove + return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(ownershipUrn.toString()); + }); Owner newOwner = new Owner(); // For backwards compatibility we have to always set the deprecated type. // If the type exists we assume it's an old ownership type that we can map to. // Else if it's a net new custom ownership type set old type to CUSTOM. - com.linkedin.common.OwnershipType gmsType = type != null ? com.linkedin.common.OwnershipType.valueOf(type.toString()) - : com.linkedin.common.OwnershipType.CUSTOM; + com.linkedin.common.OwnershipType gmsType = + type != null + ? 
com.linkedin.common.OwnershipType.valueOf(type.toString()) + : com.linkedin.common.OwnershipType.CUSTOM; newOwner.setType(gmsType); newOwner.setTypeUrn(ownershipUrn); @@ -135,8 +157,8 @@ private static void addOwner(Ownership ownershipAspect, Urn ownerUrn, OwnershipT ownershipAspect.setOwners(ownerArray); } - private static void removeOwnersIfExists(Ownership ownership, List ownerUrns, - Optional maybeOwnershipTypeUrn) { + private static void removeOwnersIfExists( + Ownership ownership, List ownerUrns, Optional maybeOwnershipTypeUrn) { if (!ownership.hasOwners()) { ownership.setOwners(new OwnerArray()); } @@ -144,23 +166,26 @@ private static void removeOwnersIfExists(Ownership ownership, List ownerUrn OwnerArray ownerArray = ownership.getOwners(); for (Urn ownerUrn : ownerUrns) { if (maybeOwnershipTypeUrn.isPresent()) { - ownerArray.removeIf(owner -> { - // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) - - // Owner is not what we are looking for - if (!owner.getOwner().equals(ownerUrn)) { - return false; - } - - // Check custom entity type urn if exists - if (owner.getTypeUrn() != null) { - return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); - } - - // Fall back to mapping deprecated type to the new ownership entity, if it matches remove - return mapOwnershipTypeToEntity(OwnershipType.valueOf(owner.getType().toString()).name()) - .equals(maybeOwnershipTypeUrn.get().toString()); - }); + ownerArray.removeIf( + owner -> { + // Remove ownership if it exists (check ownerUrn + type (entity & deprecated type)) + + // Owner is not what we are looking for + if (!owner.getOwner().equals(ownerUrn)) { + return false; + } + + // Check custom entity type urn if exists + if (owner.getTypeUrn() != null) { + return owner.getTypeUrn().equals(maybeOwnershipTypeUrn.get()); + } + + // Fall back to mapping deprecated type to the new ownership entity, if it matches + // remove + return mapOwnershipTypeToEntity( + 
OwnershipType.valueOf(owner.getType().toString()).name()) + .equals(maybeOwnershipTypeUrn.get().toString()); + }); } else { ownerArray.removeIf(owner -> owner.getOwner().equals(ownerUrn)); } @@ -168,10 +193,12 @@ private static void removeOwnersIfExists(Ownership ownership, List ownerUrn } public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, Urn resourceUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -182,10 +209,7 @@ public static boolean isAuthorizedToUpdateOwners(@Nonnull QueryContext context, } public static Boolean validateAddOwnerInput( - List owners, - Urn resourceUrn, - EntityService entityService - ) { + List owners, Urn resourceUrn, EntityService entityService) { for (OwnerInput owner : owners) { boolean result = validateAddOwnerInput(owner, resourceUrn, entityService); if (!result) { @@ -196,13 +220,12 @@ public static Boolean validateAddOwnerInput( } public static Boolean validateAddOwnerInput( - OwnerInput owner, - Urn resourceUrn, - EntityService entityService - ) { + OwnerInput owner, Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. 
Resource does not exist.", resourceUrn)); } validateOwner(owner, entityService); @@ -210,45 +233,55 @@ public static Boolean validateAddOwnerInput( return true; } - public static void validateOwner( - OwnerInput owner, - EntityService entityService - ) { + public static void validateOwner(OwnerInput owner, EntityService entityService) { OwnerEntityType ownerEntityType = owner.getOwnerEntityType(); Urn ownerUrn = UrnUtils.getUrn(owner.getOwnerUrn()); - if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_GROUP.equals(ownerEntityType) + && !Constants.CORP_GROUP_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp group urn, found %s", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp group urn, found %s", + ownerUrn)); } - if (OwnerEntityType.CORP_USER.equals(ownerEntityType) && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { + if (OwnerEntityType.CORP_USER.equals(ownerEntityType) + && !Constants.CORP_USER_ENTITY_NAME.equals(ownerUrn.getEntityType())) { throw new IllegalArgumentException( - String.format("Failed to change ownership for resource(s). Expected a corp user urn, found %s.", ownerUrn)); + String.format( + "Failed to change ownership for resource(s). Expected a corp user urn, found %s.", + ownerUrn)); } if (!entityService.exists(ownerUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Owner with urn %s does not exist.", ownerUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). 
Owner with urn %s does not exist.", + ownerUrn)); } - if (owner.getOwnershipTypeUrn() != null && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource(s). Custom Ownership type with " - + "urn %s does not exist.", owner.getOwnershipTypeUrn())); + if (owner.getOwnershipTypeUrn() != null + && !entityService.exists(UrnUtils.getUrn(owner.getOwnershipTypeUrn()))) { + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource(s). Custom Ownership type with " + + "urn %s does not exist.", + owner.getOwnershipTypeUrn())); } if (owner.getType() == null && owner.getOwnershipTypeUrn() == null) { - throw new IllegalArgumentException("Failed to change ownership for resource(s). Expected either " - + "type or ownershipTypeUrn to be specified."); + throw new IllegalArgumentException( + "Failed to change ownership for resource(s). Expected either " + + "type or ownershipTypeUrn to be specified."); } } - public static Boolean validateRemoveInput( - Urn resourceUrn, - EntityService entityService - ) { + public static Boolean validateRemoveInput(Urn resourceUrn, EntityService entityService) { if (!entityService.exists(resourceUrn)) { - throw new IllegalArgumentException(String.format("Failed to change ownership for resource %s. Resource does not exist.", resourceUrn)); + throw new IllegalArgumentException( + String.format( + "Failed to change ownership for resource %s. 
Resource does not exist.", resourceUrn)); } return true; } @@ -264,15 +297,17 @@ public static void addCreatorAsOwner( String ownershipTypeUrn = mapOwnershipTypeToEntity(ownershipType.name()); if (!entityService.exists(UrnUtils.getUrn(ownershipTypeUrn))) { - throw new RuntimeException(String.format("Unknown ownership type urn %s", ownershipTypeUrn)); + throw new RuntimeException( + String.format("Unknown ownership type urn %s", ownershipTypeUrn)); } addOwnersToResources( - ImmutableList.of(new OwnerInput(actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), + ImmutableList.of( + new OwnerInput( + actorUrn.toString(), ownerEntityType, ownershipType, ownershipTypeUrn)), ImmutableList.of(new ResourceRefInput(urn, null, null)), actorUrn, - entityService - ); + entityService); } catch (Exception e) { log.error(String.format("Failed to add creator as owner of tag %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java index f740836694dbe4..0dd737d3b2292b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/SiblingsUtils.java @@ -1,32 +1,35 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; + import com.linkedin.common.Siblings; import com.linkedin.common.urn.Urn; import com.linkedin.metadata.entity.EntityService; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import javax.annotation.Nonnull; public class SiblingsUtils { - private 
SiblingsUtils() { } + private SiblingsUtils() {} - public static List getSiblingUrns(@Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { - final Siblings siblingAspectOfEntity = (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); + public static List getSiblingUrns( + @Nonnull final Urn entityUrn, @Nonnull final EntityService entityService) { + final Siblings siblingAspectOfEntity = + (Siblings) entityService.getLatestAspect(entityUrn, SIBLINGS_ASPECT_NAME); if (siblingAspectOfEntity != null && siblingAspectOfEntity.hasSiblings()) { return siblingAspectOfEntity.getSiblings(); } return new ArrayList<>(); } - public static Optional getNextSiblingUrn(@Nonnull final List siblingUrns, @Nonnull final HashSet usedUrns) { - final List unusedSiblingUrns = siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); + public static Optional getNextSiblingUrn( + @Nonnull final List siblingUrns, @Nonnull final HashSet usedUrns) { + final List unusedSiblingUrns = + siblingUrns.stream().filter(urn -> !usedUrns.contains(urn)).collect(Collectors.toList()); return unusedSiblingUrns.stream().findFirst(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java index c0fe697c6654c3..abc479ed18ebf8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolver.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; 
+import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -10,8 +17,6 @@ import com.linkedin.data.template.StringMap; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.exception.DataHubGraphQLErrorCode; import com.linkedin.datahub.graphql.exception.DataHubGraphQLException; @@ -30,22 +35,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Resolver used for reporting Asset Operations - */ +/** Resolver used for reporting Asset Operations */ @Slf4j @RequiredArgsConstructor public class ReportOperationResolver implements DataFetcher> { - private static final List SUPPORTED_ENTITY_TYPES = ImmutableList.of( - DATASET_ENTITY_NAME - ); + private static final List SUPPORTED_ENTITY_TYPES = ImmutableList.of(DATASET_ENTITY_NAME); private final EntityClient _entityClient; @@ -53,32 +48,36 @@ public class ReportOperationResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final ReportOperationInput input = bindArgument(environment.getArgument("input"), ReportOperationInput.class); - - return CompletableFuture.supplyAsync(() -> { - - Urn 
entityUrn = UrnUtils.getUrn(input.getUrn()); - - if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - validateInput(entityUrn, input); - - try { - // Create an MCP to emit the operation - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(entityUrn, OPERATION_ASPECT_NAME, - mapOperation(input, context)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to report operation. {}", e.getMessage()); - throw new RuntimeException("Failed to report operation", e); - } - }); + final ReportOperationInput input = + bindArgument(environment.getArgument("input"), ReportOperationInput.class); + + return CompletableFuture.supplyAsync( + () -> { + Urn entityUrn = UrnUtils.getUrn(input.getUrn()); + + if (!isAuthorizedToReportOperationForResource(entityUrn, context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + validateInput(entityUrn, input); + + try { + // Create an MCP to emit the operation + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + entityUrn, OPERATION_ASPECT_NAME, mapOperation(input, context)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error("Failed to report operation. 
{}", e.getMessage()); + throw new RuntimeException("Failed to report operation", e); + } + }); } - private Operation mapOperation(final ReportOperationInput input, final QueryContext context) throws URISyntaxException { + private Operation mapOperation(final ReportOperationInput input, final QueryContext context) + throws URISyntaxException { final Operation result = new Operation(); result.setActor(UrnUtils.getUrn(context.getActorUrn())); @@ -86,13 +85,17 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont result.setCustomOperationType(input.getCustomOperationType(), SetMode.IGNORE_NULL); result.setNumAffectedRows(input.getNumAffectedRows(), SetMode.IGNORE_NULL); - long timestampMillis = input.getTimestampMillis() != null ? input.getTimestampMillis() : System.currentTimeMillis(); + long timestampMillis = + input.getTimestampMillis() != null + ? input.getTimestampMillis() + : System.currentTimeMillis(); result.setLastUpdatedTimestamp(timestampMillis); result.setTimestampMillis(timestampMillis); result.setSourceType(OperationSourceType.valueOf(input.getSourceType().toString())); if (input.getPartition() != null) { - result.setPartitionSpec(new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); + result.setPartitionSpec( + new PartitionSpec().setType(PartitionType.PARTITION).setPartition(input.getPartition())); } if (input.getCustomProperties() != null) { @@ -102,7 +105,8 @@ private Operation mapOperation(final ReportOperationInput input, final QueryCont return result; } - private StringMap mapCustomProperties(final List properties) throws URISyntaxException { + private StringMap mapCustomProperties(final List properties) + throws URISyntaxException { final StringMap result = new StringMap(); for (StringMapEntryInput entry : properties) { result.put(entry.getKey(), entry.getValue()); @@ -113,16 +117,21 @@ private StringMap mapCustomProperties(final List properties private void validateInput(final Urn 
entityUrn, final ReportOperationInput input) { if (!SUPPORTED_ENTITY_TYPES.contains(entityUrn.getEntityType())) { throw new DataHubGraphQLException( - String.format("Unable to report operation. Invalid entity type %s provided.", entityUrn.getEntityType()), + String.format( + "Unable to report operation. Invalid entity type %s provided.", + entityUrn.getEntityType()), DataHubGraphQLErrorCode.BAD_REQUEST); } } - private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, final QueryContext context) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())) - )); + private boolean isAuthorizedToReportOperationForResource( + final Urn resourceUrn, final QueryContext context) { + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_OPERATIONS_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -131,4 +140,4 @@ private boolean isAuthorizedToReportOperationForResource(final Urn resourceUrn, resourceUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java index 4cfe58072aae9d..a0cffa5eca44c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolver.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import 
static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.metadata.service.OwnershipTypeService; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; @@ -16,17 +18,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class CreateOwnershipTypeResolver implements DataFetcher> { +public class CreateOwnershipTypeResolver + implements DataFetcher> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateOwnershipTypeInput input = bindArgument(environment.getArgument("input"), CreateOwnershipTypeInput.class); @@ -36,19 +37,25 @@ public CompletableFuture get(DataFetchingEnvironment enviro "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - final Urn urn = _ownershipTypeService.createOwnershipType(input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - return createOwnershipType(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn urn = + _ownershipTypeService.createOwnershipType( + input.getName(), + input.getDescription(), + context.getAuthentication(), + System.currentTimeMillis()); + return createOwnershipType(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } - private OwnershipTypeEntity createOwnershipType(@Nonnull final Urn urn, - @Nonnull final CreateOwnershipTypeInput input) { + private OwnershipTypeEntity createOwnershipType( + @Nonnull final Urn urn, @Nonnull final CreateOwnershipTypeInput input) { return OwnershipTypeEntity.builder() .setUrn(urn.toString()) .setType(EntityType.CUSTOM_OWNERSHIP_TYPE) diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java index 87cf70193d7fd7..c5bb58a7d4b2e0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolver.java @@ -12,7 +12,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteOwnershipTypeResolver implements DataFetcher> { @@ -26,21 +25,26 @@ public CompletableFuture 
get(DataFetchingEnvironment environment) throw final Urn urn = UrnUtils.getUrn(ownershipTypeUrn); // By default, delete references final boolean deleteReferences = - environment.getArgument("deleteReferences") == null ? true : environment.getArgument("deleteReferences"); + environment.getArgument("deleteReferences") == null + ? true + : environment.getArgument("deleteReferences"); if (!AuthorizationUtils.canManageOwnershipTypes(context)) { throw new AuthorizationException( "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.deleteOwnershipType(urn, deleteReferences, context.getAuthentication()); - log.info(String.format("Successfully deleted ownership type %s with urn", urn)); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _ownershipTypeService.deleteOwnershipType( + urn, deleteReferences, context.getAuthentication()); + log.info(String.format("Successfully deleted ownership type %s with urn", urn)); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to delete ownership type with urn %s", ownershipTypeUrn), e); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java index 70441815f0a747..1c8f43a4901737 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolver.java @@ -1,12 +1,14 @@ package 
com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesInput; import com.linkedin.datahub.graphql.generated.ListOwnershipTypesResult; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -24,18 +26,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListOwnershipTypesResolver implements - DataFetcher> { +public class ListOwnershipTypesResolver + implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; @@ -44,43 +42,47 @@ public class ListOwnershipTypesResolver implements private final EntityClient _entityClient; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListOwnershipTypesInput input = 
bindArgument(environment.getArgument("input"), - ListOwnershipTypesInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final List filters = input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - + final ListOwnershipTypesInput input = + bindArgument(environment.getArgument("input"), ListOwnershipTypesInput.class); - try { + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final List filters = + input.getFilters() == null ? Collections.emptyList() : input.getFilters(); - final SearchResult gmsResult = _entityClient.search( - Constants.OWNERSHIP_TYPE_ENTITY_NAME, - query, - buildFilter(filters, Collections.emptyList()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { - final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setOwnershipTypes(mapUnresolvedOwnershipTypes(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list custom ownership types", e); - } + final SearchResult gmsResult = + _entityClient.search( + Constants.OWNERSHIP_TYPE_ENTITY_NAME, + query, + buildFilter(filters, Collections.emptyList()), + DEFAULT_SORT_CRITERION, + start, + count, + 
context.getAuthentication(), + new SearchFlags().setFulltext(true)); - }); + final ListOwnershipTypesResult result = new ListOwnershipTypesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setOwnershipTypes( + mapUnresolvedOwnershipTypes( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list custom ownership types", e); + } + }); } private List mapUnresolvedOwnershipTypes(List entityUrns) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java index 43fd2493043975..839121a295d9ac 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -17,17 +19,16 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class UpdateOwnershipTypeResolver implements DataFetcher> { +public class UpdateOwnershipTypeResolver + implements DataFetcher> { private final OwnershipTypeService _ownershipTypeService; @Override - public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(DataFetchingEnvironment 
environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final UpdateOwnershipTypeInput input = @@ -39,27 +40,35 @@ public CompletableFuture get(DataFetchingEnvironment enviro "Unauthorized to perform this action. Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - _ownershipTypeService.updateOwnershipType(urn, input.getName(), input.getDescription(), - context.getAuthentication(), System.currentTimeMillis()); - log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); - return getOwnershipType(urn, context.getAuthentication()); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _ownershipTypeService.updateOwnershipType( + urn, + input.getName(), + input.getDescription(), + context.getAuthentication(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated Ownership Type %s with urn", urn)); + return getOwnershipType(urn, context.getAuthentication()); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }); } - private OwnershipTypeEntity getOwnershipType(@Nonnull final Urn urn, - @Nonnull final Authentication authentication) { - final EntityResponse maybeResponse = _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); + private OwnershipTypeEntity getOwnershipType( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) { + final EntityResponse maybeResponse = + _ownershipTypeService.getOwnershipTypeEntityResponse(urn, authentication); // If there is no response, there is a problem. 
if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", + String.format( + "Failed to perform update to Ownership Type with urn %s. Failed to find Ownership Type in GMS.", urn)); } return OwnershipTypeMapper.map(maybeResponse); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java index 485d40e60547e8..567745b894ca9d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/DeletePolicyResolver.java @@ -9,10 +9,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; - -/** - * Resolver responsible for hard deleting a particular DataHub access control policy. - */ +/** Resolver responsible for hard deleting a particular DataHub access control policy. 
*/ public class DeletePolicyResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -27,18 +24,24 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final String policyUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(policyUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return policyUrn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against policy with urn %s", policyUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return policyUrn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against policy with urn %s", policyUrn), + e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java index 11f7793db82c8b..3328eff2bdf45b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/GetGrantedPrivilegesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.AuthorizerChain; import com.datahub.authorization.DataHubAuthorizer; import com.datahub.authorization.EntitySpec; @@ -14,17 +16,15 @@ import java.util.Optional; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - /** - * Resolver to support the getGrantedPrivileges end point - * Fetches all privileges that are granted for the given actor for the given resource (optional) + * Resolver to support the getGrantedPrivileges end point Fetches all privileges that are granted + * for the given actor for the given resource (optional) */ public class GetGrantedPrivilegesResolver implements DataFetcher> { @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final GetGrantedPrivilegesInput input = @@ -33,22 +33,27 @@ public CompletableFuture get(final DataFetchingEnvironment environme if (!isAuthorized(context, actor)) { throw new AuthorizationException("Unauthorized to get privileges for the given author."); } - final Optional resourceSpec = 
Optional.ofNullable(input.getResourceSpec()) - .map(spec -> new EntitySpec(EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); + final Optional resourceSpec = + Optional.ofNullable(input.getResourceSpec()) + .map( + spec -> + new EntitySpec( + EntityTypeMapper.getName(spec.getResourceType()), spec.getResourceUrn())); if (context.getAuthorizer() instanceof AuthorizerChain) { - DataHubAuthorizer dataHubAuthorizer = ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); + DataHubAuthorizer dataHubAuthorizer = + ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer(); List privileges = dataHubAuthorizer.getGrantedPrivileges(actor, resourceSpec); - return CompletableFuture.supplyAsync(() -> Privileges.builder() - .setPrivileges(privileges) - .build()); + return CompletableFuture.supplyAsync( + () -> Privileges.builder().setPrivileges(privileges).build()); } throw new UnsupportedOperationException( - String.format("GetGrantedPrivileges function is not supported on authorizer of type %s", + String.format( + "GetGrantedPrivileges function is not supported on authorizer of type %s", context.getAuthorizer().getClass().getSimpleName())); } private boolean isAuthorized(final QueryContext context, final String actor) { return actor.equals(context.getActorUrn()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java index b44da1c2f832c6..87832b8c3aa401 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/ListPoliciesResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; + import com.datahub.authorization.PolicyFetcher; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -14,9 +16,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; - - public class ListPoliciesResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ -30,18 +29,22 @@ public ListPoliciesResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (PolicyAuthUtils.canManagePolicies(context)) { - final ListPoliciesInput input = bindArgument(environment.getArgument("input"), ListPoliciesInput.class); + final ListPoliciesInput input = + bindArgument(environment.getArgument("input"), ListPoliciesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return _policyFetcher.fetchPolicies(start, query, count, context.getAuthentication()) - .thenApply(policyFetchResult -> { + return _policyFetcher + .fetchPolicies(start, query, count, context.getAuthentication()) + .thenApply( + policyFetchResult -> { final ListPoliciesResult result = new ListPoliciesResult(); result.setStart(start); result.setCount(count); @@ -50,14 +53,18 @@ public CompletableFuture get(final DataFetchingEnvironment e return result; }); } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } private List mapEntities(final List policies) { - return policies.stream().map(policy -> { - Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); - mappedPolicy.setUrn(policy.getUrn().toString()); - return mappedPolicy; - }).collect(Collectors.toList()); + return policies.stream() + .map( + policy -> { + Policy mappedPolicy = PolicyInfoPolicyMapper.map(policy.getPolicyInfo()); + mappedPolicy.setUrn(policy.getUrn().toString()); + return mappedPolicy; + }) + .collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java index dcc5d1fd23302e..d0446d218dac6b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/PolicyAuthUtils.java @@ -1,19 +1,23 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; + import com.datahub.plugins.auth.authorization.Authorizer; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.metadata.authorization.PoliciesConfig; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.AuthUtils.*; public class PolicyAuthUtils { static boolean canManagePolicies(@Nonnull QueryContext context) { final Authorizer authorizer = context.getAuthorizer(); final String principal = context.getActorUrn(); - return isAuthorized(principal, ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), authorizer); + return isAuthorized( + principal, + 
ImmutableList.of(PoliciesConfig.MANAGE_POLICIES_PRIVILEGE.getType()), + authorizer); } - private PolicyAuthUtils() { } + private PolicyAuthUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java index 6dcc143a1a3af4..dcdf78ebc15bb1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/UpsertPolicyResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.policy; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; + import com.datahub.authorization.AuthorizerChain; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,10 +19,6 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; - - public class UpsertPolicyResolver implements DataFetcher> { private static final String POLICY_ENTITY_NAME = "dataHubPolicy"; @@ -38,7 +37,8 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (PolicyAuthUtils.canManagePolicies(context)) { final Optional policyUrn = Optional.ofNullable(environment.getArgument("urn")); - final PolicyUpdateInput input = bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); + final PolicyUpdateInput input = + bindArgument(environment.getArgument("input"), PolicyUpdateInput.class); // Finally, create the MetadataChangeProposal. 
final MetadataChangeProposal proposal; @@ -48,7 +48,9 @@ public CompletableFuture get(final DataFetchingEnvironment environment) if (policyUrn.isPresent()) { // Update existing policy - proposal = buildMetadataChangeProposalWithUrn(Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithUrn( + Urn.createFromString(policyUrn.get()), POLICY_INFO_ASPECT_NAME, info); } else { // Create new policy // Since we are creating a new Policy, we need to generate a unique UUID. @@ -58,21 +60,29 @@ public CompletableFuture get(final DataFetchingEnvironment environment) // Create the Policy key. final DataHubPolicyKey key = new DataHubPolicyKey(); key.setId(uuidStr); - proposal = buildMetadataChangeProposalWithKey(key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); + proposal = + buildMetadataChangeProposalWithKey( + key, POLICY_ENTITY_NAME, POLICY_INFO_ASPECT_NAME, info); } - return CompletableFuture.supplyAsync(() -> { - try { - String urn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - if (context.getAuthorizer() instanceof AuthorizerChain) { - ((AuthorizerChain) context.getAuthorizer()).getDefaultAuthorizer().invalidateCache(); - } - return urn; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String urn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + if (context.getAuthorizer() instanceof AuthorizerChain) { + ((AuthorizerChain) context.getAuthorizer()) + .getDefaultAuthorizer() + .invalidateCache(); + } + return urn; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java index b9a6bf07be8c86..a350fb91f9d3b8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyInfoPolicyMapper.java @@ -2,6 +2,7 @@ import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.Policy; import com.linkedin.datahub.graphql.generated.PolicyMatchCondition; import com.linkedin.datahub.graphql.generated.PolicyMatchCriterion; @@ -9,7 +10,6 @@ import com.linkedin.datahub.graphql.generated.PolicyMatchFilter; import com.linkedin.datahub.graphql.generated.PolicyState; import com.linkedin.datahub.graphql.generated.PolicyType; -import com.linkedin.datahub.graphql.generated.ActorFilter; import com.linkedin.datahub.graphql.generated.ResourceFilter; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -20,9 +20,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** - * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link com.linkedin.datahub.graphql.generated.Policy}. + * Maps {@link com.linkedin.policy.DataHubPolicyInfo} to GraphQL {@link + * com.linkedin.datahub.graphql.generated.Policy}. 
*/ public class PolicyInfoPolicyMapper implements ModelMapper { @@ -56,16 +56,20 @@ private ActorFilter mapActors(final DataHubActorFilter actorFilter) { result.setResourceOwners(actorFilter.isResourceOwners()); UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -87,14 +91,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + 
criterion.getValues().stream() + .map(this::mapValue) + .collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -102,7 +112,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java index cb323b60dd4653..d82d71295d41b9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/policy/mappers/PolicyUpdateInputInfoMapper.java @@ -19,11 +19,9 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -/** - * Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. - */ -public class PolicyUpdateInputInfoMapper implements ModelMapper { +/** Maps GraphQL {@link PolicyUpdateInput} to DataHub backend {@link DataHubPolicyInfo}. 
*/ +public class PolicyUpdateInputInfoMapper + implements ModelMapper { public static final PolicyUpdateInputInfoMapper INSTANCE = new PolicyUpdateInputInfoMapper(); @@ -52,13 +50,21 @@ private DataHubActorFilter mapActors(final ActorFilterInput actorInput) { result.setAllUsers(actorInput.getAllUsers()); result.setResourceOwners(actorInput.getResourceOwners()); if (actorInput.getResourceOwnersTypes() != null) { - result.setResourceOwnersTypes(new UrnArray(actorInput.getResourceOwnersTypes().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setResourceOwnersTypes( + new UrnArray( + actorInput.getResourceOwnersTypes().stream() + .map(this::createUrn) + .collect(Collectors.toList()))); } if (actorInput.getGroups() != null) { - result.setGroups(new UrnArray(actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setGroups( + new UrnArray( + actorInput.getGroups().stream().map(this::createUrn).collect(Collectors.toList()))); } if (actorInput.getUsers() != null) { - result.setUsers(new UrnArray(actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); + result.setUsers( + new UrnArray( + actorInput.getUsers().stream().map(this::createUrn).collect(Collectors.toList()))); } return result; } @@ -83,19 +89,26 @@ private DataHubResourceFilter mapResources(final ResourceFilterInput resourceInp } private PolicyMatchFilter mapFilter(final PolicyMatchFilterInput filter) { - return new PolicyMatchFilter().setCriteria(new PolicyMatchCriterionArray(filter.getCriteria() - .stream() - .map(criterion -> new PolicyMatchCriterion().setField(criterion.getField()) - .setValues(new StringArray(criterion.getValues())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name()))) - .collect(Collectors.toList()))); + return new PolicyMatchFilter() + .setCriteria( + new PolicyMatchCriterionArray( + filter.getCriteria().stream() + .map( + criterion -> + new PolicyMatchCriterion() + 
.setField(criterion.getField()) + .setValues(new StringArray(criterion.getValues())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name()))) + .collect(Collectors.toList()))); } private Urn createUrn(String urnStr) { try { return Urn.createFromString(urnStr); } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urnStr %s into an URN object", urnStr), e); + throw new RuntimeException( + String.format("Failed to convert urnStr %s into an URN object", urnStr), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java index 524caf14e9afe4..8e0ee335e09f38 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -18,16 +20,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreatePostResolver implements DataFetcher> { private final PostService _postService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { @@ -35,7 +35,8 @@ public CompletableFuture get(final DataFetchingEnvironment 
environment) "Unauthorized to create posts. Please contact your DataHub administrator if this needs corrective action."); } - final CreatePostInput input = bindArgument(environment.getArgument("input"), CreatePostInput.class); + final CreatePostInput input = + bindArgument(environment.getArgument("input"), CreatePostInput.class); final PostType type = input.getPostType(); final UpdatePostContentInput content = input.getContent(); final PostContentType contentType = content.getContentType(); @@ -45,16 +46,21 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final UpdateMediaInput updateMediaInput = content.getMedia(); final Authentication authentication = context.getAuthentication(); - Media media = updateMediaInput == null ? null - : _postService.mapMedia(updateMediaInput.getType().toString(), updateMediaInput.getLocation()); - PostContent postContent = _postService.mapPostContent(contentType.toString(), title, description, link, media); - - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.createPost(type.toString(), postContent, authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create a new post", e); - } - }); + Media media = + updateMediaInput == null + ? 
null + : _postService.mapMedia( + updateMediaInput.getType().toString(), updateMediaInput.getLocation()); + PostContent postContent = + _postService.mapPostContent(contentType.toString(), title, description, link, media); + + return CompletableFuture.supplyAsync( + () -> { + try { + return _postService.createPost(type.toString(), postContent, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java index d3cd0126fb8527..7ab5d1381a1b30 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolver.java @@ -13,14 +13,14 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeletePostResolver implements DataFetcher> { private final PostService _postService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!AuthorizationUtils.canManageGlobalAnnouncements(context)) { @@ -31,12 +31,13 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final Urn postUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return _postService.deletePost(postUrn, authentication); - } catch (Exception e) { - throw new RuntimeException("Failed to create a new post", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + 
return _postService.deletePost(postUrn, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java index 59f2b458fdc90c..5292adbe3aac39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -22,10 +25,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListPostsResolver implements DataFetcher> { @@ -36,38 +35,58 @@ public class ListPostsResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final ListPostsInput input = bindArgument(environment.getArgument("input"), ListPostsInput.class); + final ListPostsInput input = + bindArgument(environment.getArgument("input"), ListPostsInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? 
DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(LAST_MODIFIED_FIELD_NAME).setOrder(SortOrder.DESCENDING); + return CompletableFuture.supplyAsync( + () -> { + try { + final SortCriterion sortCriterion = + new SortCriterion() + .setField(LAST_MODIFIED_FIELD_NAME) + .setOrder(SortOrder.DESCENDING); - // First, get all Post Urns. - final SearchResult gmsResult = _entityClient.search(POST_ENTITY_NAME, query, null, sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + // First, get all Post Urns. + final SearchResult gmsResult = + _entityClient.search( + POST_ENTITY_NAME, + query, + null, + sortCriterion, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all Posts. - final Map entities = _entityClient.batchGetV2(POST_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, authentication); + // Then, get and hydrate all Posts. 
+ final Map entities = + _entityClient.batchGetV2( + POST_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + authentication); - final ListPostsResult result = new ListPostsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setPosts(entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list posts", e); - } - }); + final ListPostsResult result = new ListPostsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setPosts( + entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list posts", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java index 27de443bc100a0..48f31fb75d371c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -7,8 +9,8 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateQueryInput; -import 
com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.CreateQuerySubjectInput; +import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.types.query.QueryMapper; import com.linkedin.metadata.service.QueryService; import com.linkedin.query.QueryLanguage; @@ -22,9 +24,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateQueryResolver implements DataFetcher> { @@ -32,40 +31,49 @@ public class CreateQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateQueryInput input = bindArgument(environment.getArgument("input"), CreateQueryInput.class); + final CreateQueryInput input = + bindArgument(environment.getArgument("input"), CreateQueryInput.class); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (!AuthorizationUtils.canCreateQuery(input.getSubjects() - .stream() - .map(CreateQuerySubjectInput::getDatasetUrn).map(UrnUtils::getUrn) - .collect(Collectors.toList()), context)) { - throw new AuthorizationException( - "Unauthorized to create Query. Please contact your DataHub administrator for more information."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateQuery( + input.getSubjects().stream() + .map(CreateQuerySubjectInput::getDatasetUrn) + .map(UrnUtils::getUrn) + .collect(Collectors.toList()), + context)) { + throw new AuthorizationException( + "Unauthorized to create Query. 
Please contact your DataHub administrator for more information."); + } - try { - final Urn queryUrn = _queryService.createQuery( - input.getProperties().getName(), - input.getProperties().getDescription(), - QuerySource.MANUAL, - new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())), - input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()), - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create a new Query from input %s", input), e); - } - }); + try { + final Urn queryUrn = + _queryService.createQuery( + input.getProperties().getName(), + input.getProperties().getDescription(), + QuerySource.MANUAL, + new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())), + input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()), + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create a new Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java index 5c5bb288f32bf6..4f5887c91b4947 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolver.java @@ -18,7 +18,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class DeleteQueryResolver implements DataFetcher> { @@ -26,29 +25,34 @@ public class DeleteQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); - final List subjectUrns = existingSubjects != null - ? existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()) - : Collections.emptyList(); - - if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); - } - - try { - _queryService.deleteQuery(queryUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException("Failed to delete Query", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); + final List subjectUrns = + existingSubjects != null + ? 
existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()) + : Collections.emptyList(); + + if (!AuthorizationUtils.canDeleteQuery(queryUrn, subjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to delete Query. Please contact your DataHub administrator if this needs corrective action."); + } + + try { + _queryService.deleteQuery(queryUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException("Failed to delete Query", e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java index c7e70cac15bdb4..fec5bb120eebae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -29,10 +32,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListQueriesResolver implements DataFetcher> { @@ -48,38 +47,52 @@ public class ListQueriesResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListQueriesInput input = 
bindArgument(environment.getArgument("input"), ListQueriesInput.class); + final ListQueriesInput input = + bindArgument(environment.getArgument("input"), ListQueriesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - final SortCriterion sortCriterion = - new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); - - // First, get all Query Urns. - final SearchResult gmsResult = _entityClient.search(QUERY_ENTITY_NAME, query, buildFilters(input), sortCriterion, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true).setSkipHighlighting(true)); - - final ListQueriesResult result = new ListQueriesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setQueries(mapUnresolvedQueries(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Queries", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final SortCriterion sortCriterion = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); + + // First, get all Query Urns. 
+ final SearchResult gmsResult = + _entityClient.search( + QUERY_ENTITY_NAME, + query, + buildFilters(input), + sortCriterion, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true).setSkipHighlighting(true)); + + final ListQueriesResult result = new ListQueriesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setQueries( + mapUnresolvedQueries( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Queries", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial Query objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Query objects which will be + // resolved be a separate Batch resolver. private List mapUnresolvedQueries(final List queryUrns) { final List results = new ArrayList<>(); for (final Urn urn : queryUrns) { @@ -99,13 +112,23 @@ private Filter buildFilters(@Nonnull final ListQueriesInput input) { // Optionally add a source filter. if (input.getSource() != null) { andConditions.add( - new FacetFilterInput(QUERY_SOURCE_FIELD, null, ImmutableList.of(input.getSource().toString()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_SOURCE_FIELD, + null, + ImmutableList.of(input.getSource().toString()), + false, + FilterOperator.EQUAL)); } // Optionally add an entity type filter. 
if (input.getDatasetUrn() != null) { andConditions.add( - new FacetFilterInput(QUERY_ENTITIES_FIELD, null, ImmutableList.of(input.getDatasetUrn()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + QUERY_ENTITIES_FIELD, + null, + ImmutableList.of(input.getDatasetUrn()), + false, + FilterOperator.EQUAL)); } criteria.setAnd(andConditions); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java index ef34e91d8fe772..cc284aaf7b5637 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -26,9 +28,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class UpdateQueryResolver implements DataFetcher> { @@ -36,60 +35,72 @@ public class UpdateQueryResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateQueryInput input = bindArgument(environment.getArgument("input"), UpdateQueryInput.class); + final UpdateQueryInput input = + bindArgument(environment.getArgument("input"), UpdateQueryInput.class); final Urn queryUrn = UrnUtils.getUrn(environment.getArgument("urn")); final Authentication authentication = 
context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - final QuerySubjects existingSubjects = _queryService.getQuerySubjects(queryUrn, authentication); + return CompletableFuture.supplyAsync( + () -> { + final QuerySubjects existingSubjects = + _queryService.getQuerySubjects(queryUrn, authentication); - if (existingSubjects == null) { - // No Query Found - throw new DataHubGraphQLException(String.format("Failed to find query with urn %s", queryUrn), DataHubGraphQLErrorCode.NOT_FOUND); - } + if (existingSubjects == null) { + // No Query Found + throw new DataHubGraphQLException( + String.format("Failed to find query with urn %s", queryUrn), + DataHubGraphQLErrorCode.NOT_FOUND); + } - final List subjectUrns = existingSubjects.getSubjects().stream().map(QuerySubject::getEntity).collect(Collectors.toList()); - final List newSubjectUrns = input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) - .collect(Collectors.toList()) - : Collections.emptyList(); - final List impactedSubjectUrns = new ArrayList<>(); - impactedSubjectUrns.addAll(subjectUrns); - impactedSubjectUrns.addAll(newSubjectUrns); + final List subjectUrns = + existingSubjects.getSubjects().stream() + .map(QuerySubject::getEntity) + .collect(Collectors.toList()); + final List newSubjectUrns = + input.getSubjects() != null + ? input.getSubjects().stream() + .map(sub -> UrnUtils.getUrn(sub.getDatasetUrn())) + .collect(Collectors.toList()) + : Collections.emptyList(); + final List impactedSubjectUrns = new ArrayList<>(); + impactedSubjectUrns.addAll(subjectUrns); + impactedSubjectUrns.addAll(newSubjectUrns); - if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { - throw new AuthorizationException( - "Unauthorized to update Query. 
Please contact your DataHub administrator if this needs corrective action."); - } + if (!AuthorizationUtils.canUpdateQuery(impactedSubjectUrns, context)) { + throw new AuthorizationException( + "Unauthorized to update Query. Please contact your DataHub administrator if this needs corrective action."); + } - try { - _queryService.updateQuery( - queryUrn, - input.getProperties() != null ? input.getProperties().getName() : null, - input.getProperties() != null ? input.getProperties().getDescription() : null, - input.getProperties() != null && input.getProperties().getStatement() != null - ? new QueryStatement() - .setValue(input.getProperties().getStatement().getValue()) - .setLanguage(QueryLanguage.valueOf(input.getProperties().getStatement().getLanguage().toString())) - : null, - input.getSubjects() != null - ? input.getSubjects() - .stream() - .map(sub -> new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) - .collect(Collectors.toList()) - : null, - authentication, - System.currentTimeMillis()); - return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update Query from input %s", input), e); - } - }); + try { + _queryService.updateQuery( + queryUrn, + input.getProperties() != null ? input.getProperties().getName() : null, + input.getProperties() != null ? input.getProperties().getDescription() : null, + input.getProperties() != null && input.getProperties().getStatement() != null + ? new QueryStatement() + .setValue(input.getProperties().getStatement().getValue()) + .setLanguage( + QueryLanguage.valueOf( + input.getProperties().getStatement().getLanguage().toString())) + : null, + input.getSubjects() != null + ? 
input.getSubjects().stream() + .map( + sub -> + new QuerySubject().setEntity(UrnUtils.getUrn(sub.getDatasetUrn()))) + .collect(Collectors.toList()) + : null, + authentication, + System.currentTimeMillis()); + return QueryMapper.map(_queryService.getQueryEntityResponse(queryUrn, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update Query from input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index df1a6d4d4b00dd..ca1e01b45989d2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.recommendation; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ContentParams; @@ -31,12 +33,10 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor -public class ListRecommendationsResolver implements DataFetcher> { +public class ListRecommendationsResolver + implements DataFetcher> { private static final ListRecommendationsResult EMPTY_RECOMMENDATIONS = new ListRecommendationsResult(Collections.emptyList()); @@ -49,24 +49,28 @@ public CompletableFuture get(DataFetchingEnvironment final ListRecommendationsInput input = bindArgument(environment.getArgument("input"), ListRecommendationsInput.class); - return CompletableFuture.supplyAsync(() -> { - 
try { - log.debug("Listing recommendations for input {}", input); - List modules = - _recommendationsService.listRecommendations(Urn.createFromString(input.getUserUrn()), - mapRequestContext(input.getRequestContext()), input.getLimit()); - return ListRecommendationsResult.builder() - .setModules(modules.stream() - .map(this::mapRecommendationModule) - .filter(Optional::isPresent) - .map(Optional::get) - .collect(Collectors.toList())) - .build(); - } catch (Exception e) { - log.error("Failed to get recommendations for input {}", input, e); - return EMPTY_RECOMMENDATIONS; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug("Listing recommendations for input {}", input); + List modules = + _recommendationsService.listRecommendations( + Urn.createFromString(input.getUserUrn()), + mapRequestContext(input.getRequestContext()), + input.getLimit()); + return ListRecommendationsResult.builder() + .setModules( + modules.stream() + .map(this::mapRecommendationModule) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList())) + .build(); + } catch (Exception e) { + log.error("Failed to get recommendations for input {}", input, e); + return EMPTY_RECOMMENDATIONS; + } + }); } private com.linkedin.metadata.recommendation.RecommendationRequestContext mapRequestContext( @@ -74,22 +78,24 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq com.linkedin.metadata.recommendation.ScenarioType mappedScenarioType; try { mappedScenarioType = - com.linkedin.metadata.recommendation.ScenarioType.valueOf(requestContext.getScenario().toString()); + com.linkedin.metadata.recommendation.ScenarioType.valueOf( + requestContext.getScenario().toString()); } catch (IllegalArgumentException e) { log.error("Failed to map scenario type: {}", requestContext.getScenario(), e); throw e; } com.linkedin.metadata.recommendation.RecommendationRequestContext mappedRequestContext = - new 
com.linkedin.metadata.recommendation.RecommendationRequestContext().setScenario(mappedScenarioType); + new com.linkedin.metadata.recommendation.RecommendationRequestContext() + .setScenario(mappedScenarioType); if (requestContext.getSearchRequestContext() != null) { SearchRequestContext searchRequestContext = new SearchRequestContext().setQuery(requestContext.getSearchRequestContext().getQuery()); if (requestContext.getSearchRequestContext().getFilters() != null) { - searchRequestContext.setFilters(new CriterionArray(requestContext.getSearchRequestContext() - .getFilters() - .stream() - .map(facetField -> criterionFromFilter(facetField)) - .collect(Collectors.toList()))); + searchRequestContext.setFilters( + new CriterionArray( + requestContext.getSearchRequestContext().getFilters().stream() + .map(facetField -> criterionFromFilter(facetField)) + .collect(Collectors.toList()))); } mappedRequestContext.setSearchRequestContext(searchRequestContext); } @@ -98,12 +104,17 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq try { entityUrn = Urn.createFromString(requestContext.getEntityRequestContext().getUrn()); } catch (URISyntaxException e) { - log.error("Malformed URN while mapping recommendations request: {}", - requestContext.getEntityRequestContext().getUrn(), e); + log.error( + "Malformed URN while mapping recommendations request: {}", + requestContext.getEntityRequestContext().getUrn(), + e); throw new IllegalArgumentException(e); } - EntityRequestContext entityRequestContext = new EntityRequestContext().setUrn(entityUrn) - .setType(EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); + EntityRequestContext entityRequestContext = + new EntityRequestContext() + .setUrn(entityUrn) + .setType( + EntityTypeMapper.getName(requestContext.getEntityRequestContext().getType())); mappedRequestContext.setEntityRequestContext(entityRequestContext); } return mappedRequestContext; @@ -115,13 +126,16 @@ private Optional 
mapRecommendationModule( mappedModule.setTitle(module.getTitle()); mappedModule.setModuleId(module.getModuleId()); try { - mappedModule.setRenderType(RecommendationRenderType.valueOf(module.getRenderType().toString())); + mappedModule.setRenderType( + RecommendationRenderType.valueOf(module.getRenderType().toString())); } catch (IllegalArgumentException e) { log.error("Failed to map render type: {}", module.getRenderType(), e); throw e; } mappedModule.setContent( - module.getContent().stream().map(this::mapRecommendationContent).collect(Collectors.toList())); + module.getContent().stream() + .map(this::mapRecommendationContent) + .collect(Collectors.toList())); return Optional.of(mappedModule); } @@ -145,26 +159,31 @@ private RecommendationParams mapRecommendationParams( SearchParams searchParams = new SearchParams(); searchParams.setQuery(params.getSearchParams().getQuery()); if (!params.getSearchParams().getFilters().isEmpty()) { - searchParams.setFilters(params.getSearchParams() - .getFilters() - .stream() - .map(criterion -> FacetFilter.builder().setField(criterion.getField()).setValues( - ImmutableList.of(criterion.getValue())).build()) - .collect(Collectors.toList())); + searchParams.setFilters( + params.getSearchParams().getFilters().stream() + .map( + criterion -> + FacetFilter.builder() + .setField(criterion.getField()) + .setValues(ImmutableList.of(criterion.getValue())) + .build()) + .collect(Collectors.toList())); } mappedParams.setSearchParams(searchParams); } if (params.hasEntityProfileParams()) { Urn profileUrn = params.getEntityProfileParams().getUrn(); - mappedParams.setEntityProfileParams(EntityProfileParams.builder() - .setUrn(profileUrn.toString()) - .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) - .build()); + mappedParams.setEntityProfileParams( + EntityProfileParams.builder() + .setUrn(profileUrn.toString()) + .setType(EntityTypeMapper.getType(profileUrn.getEntityType())) + .build()); } if (params.hasContentParams()) { - 
mappedParams.setContentParams(ContentParams.builder().setCount(params.getContentParams().getCount()).build()); + mappedParams.setContentParams( + ContentParams.builder().setCount(params.getContentParams().getCount()).build()); } return mappedParams; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java index 43d975344ba25e..a71da7821f09cb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.datahub.authorization.role.RoleService; @@ -13,11 +15,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j - @RequiredArgsConstructor public class AcceptRoleResolver implements DataFetcher> { private final RoleService _roleService; @@ -27,25 +25,32 @@ public class AcceptRoleResolver implements DataFetcher get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final AcceptRoleInput input = bindArgument(environment.getArgument("input"), AcceptRoleInput.class); + final AcceptRoleInput input = + bindArgument(environment.getArgument("input"), AcceptRoleInput.class); final String inviteTokenStr = input.getInviteToken(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); - if 
(!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { - throw new RuntimeException(String.format("Invite token %s is invalid", inviteTokenStr)); - } - - final Urn roleUrn = _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); - _roleService.batchAssignRoleToActors(Collections.singletonList(authentication.getActor().toUrnStr()), roleUrn, - authentication); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to accept role using invite token %s", inviteTokenStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn inviteTokenUrn = _inviteTokenService.getInviteTokenUrn(inviteTokenStr); + if (!_inviteTokenService.isInviteTokenValid(inviteTokenUrn, authentication)) { + throw new RuntimeException( + String.format("Invite token %s is invalid", inviteTokenStr)); + } + + final Urn roleUrn = + _inviteTokenService.getInviteTokenRole(inviteTokenUrn, authentication); + _roleService.batchAssignRoleToActors( + Collections.singletonList(authentication.getActor().toUrnStr()), + roleUrn, + authentication); + + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to accept role using invite token %s", inviteTokenStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java index dc847069afae91..1997d0ac74601e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.linkedin.common.urn.Urn; @@ -13,10 +16,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class BatchAssignRoleResolver implements DataFetcher> { @@ -30,19 +29,22 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw "Unauthorized to assign roles. Please contact your DataHub administrator if this needs corrective action."); } - final BatchAssignRoleInput input = bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); + final BatchAssignRoleInput input = + bindArgument(environment.getArgument("input"), BatchAssignRoleInput.class); final String roleUrnStr = input.getRoleUrn(); final List actors = input.getActors(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - final Urn roleUrn = roleUrnStr == null ? null : Urn.createFromString(roleUrnStr); - _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Urn roleUrn = roleUrnStr == null ? 
null : Urn.createFromString(roleUrnStr); + _roleService.batchAssignRoleToActors(actors, roleUrn, authentication); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against input %s", input), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java index 6bdf52e2f89f1b..61ecf09fc91a51 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class CreateInviteTokenResolver implements DataFetcher> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to create invite tokens. 
Please contact your DataHub administrator if this needs corrective action."); } - final CreateInviteTokenInput input = bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); + final CreateInviteTokenInput input = + bindArgument(environment.getArgument("input"), CreateInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, true, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java index 0b0cbbb7ba4732..066753c4f7559f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -12,33 +15,34 @@ import lombok.RequiredArgsConstructor; import 
lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - @Slf4j @RequiredArgsConstructor public class GetInviteTokenResolver implements DataFetcher> { private final InviteTokenService _inviteTokenService; @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (!canManagePolicies(context)) { throw new AuthorizationException( "Unauthorized to get invite tokens. Please contact your DataHub administrator if this needs corrective action."); } - final GetInviteTokenInput input = bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); + final GetInviteTokenInput input = + bindArgument(environment.getArgument("input"), GetInviteTokenInput.class); final String roleUrnStr = input.getRoleUrn(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - try { - return new InviteToken(_inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to get invite token for role %s", roleUrnStr), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + return new InviteToken( + _inviteTokenService.getInviteToken(roleUrnStr, false, authentication)); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to get invite token for role %s", roleUrnStr), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java index 4746370d8603b8..a1dd9219f6549c 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.DataHubRole; @@ -24,10 +27,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - @Slf4j @RequiredArgsConstructor public class ListRolesResolver implements DataFetcher> { @@ -38,36 +37,51 @@ public class ListRolesResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListRolesInput input = bindArgument(environment.getArgument("input"), ListRolesInput.class); + final ListRolesInput input = + bindArgument(environment.getArgument("input"), ListRolesInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all role Urns. - final SearchResult gmsResult = - _entityClient.search(DATAHUB_ROLE_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all role Urns. 
+ final SearchResult gmsResult = + _entityClient.search( + DATAHUB_ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get and hydrate all users. - final Map entities = _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), - null, context.getAuthentication()); + // Then, get and hydrate all users. + final Map entities = + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - final ListRolesResult result = new ListRolesResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setRoles(mapEntitiesToRoles(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list roles", e); - } - }); + final ListRolesResult result = new ListRolesResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setRoles(mapEntitiesToRoles(entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list roles", e); + } + }); } private List mapEntitiesToRoles(final Collection entities) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java index e9140441999e26..6d23456b76b4f4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AggregateAcrossEntitiesInput; @@ -14,25 +19,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.getEntityNames; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.mapInputFlags; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; /** - * Executes a search query only to get a provided list of aggregations back. - * Does not resolve any entities as results. + * Executes a search query only to get a provided list of aggregations back. Does not resolve any + * entities as results. 
*/ @Slf4j @RequiredArgsConstructor -public class AggregateAcrossEntitiesResolver implements DataFetcher> { +public class AggregateAcrossEntitiesResolver + implements DataFetcher> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -48,47 +48,63 @@ public CompletableFuture get(DataFetchingEnvironment environme // escape forward slash since it is a reserved character in Elasticsearch final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - - final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - - final List facets = input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; - - try { - return mapAggregateResults(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - 0, - 0, // 0 entity count because we don't want resolved entities - searchFlags, - null, - ResolverUtils.getAuthentication(environment), - facets)); - } catch (Exception e) { - log.error( - "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", - input.getTypes(), input.getQuery(), input.getOrFilters()); - throw new RuntimeException( - "Failed to execute aggregate across entities: " + String.format("entity types %s, query %s, filters: %s", - input.getTypes(), input.getQuery(), input.getOrFilters()), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + + final SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + + final List facets = + input.getFacets() != null && input.getFacets().size() > 0 ? input.getFacets() : null; + + try { + return mapAggregateResults( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + 0, + 0, // 0 entity count because we don't want resolved entities + searchFlags, + null, + ResolverUtils.getAuthentication(environment), + facets)); + } catch (Exception e) { + log.error( + "Failed to execute aggregate across entities: entity types {}, query {}, filters: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters()); + throw new RuntimeException( + "Failed to execute aggregate across entities: " + + String.format( + "entity types %s, query %s, filters: %s", + input.getTypes(), input.getQuery(), input.getOrFilters()), + e); + } + }); } AggregateResults mapAggregateResults(SearchResult searchResult) { final AggregateResults results = new AggregateResults(); - results.setFacets(searchResult.getMetadata().getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + results.setFacets( + searchResult.getMetadata().getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return results; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java index 043ecf5eb97f18..c3e843cefd5c84 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; @@ -13,87 +17,90 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - import java.util.ArrayList; +import java.util.List; import java.util.Map; +import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; -import java.util.concurrent.CompletableFuture; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ -public class AutoCompleteForMultipleResolver implements DataFetcher> { +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ +public class AutoCompleteForMultipleResolver + implements DataFetcher> { - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteForMultipleResolver.class.getName()); - private final Map> _typeToEntity; - private final ViewService _viewService; + private final Map> _typeToEntity; + private final ViewService _viewService; - public AutoCompleteForMultipleResolver(@Nonnull final List> searchableEntities, @Nonnull final ViewService viewService) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - _viewService = viewService; - } + public AutoCompleteForMultipleResolver( + @Nonnull final List> searchableEntities, + @Nonnull final ViewService viewService) { + _typeToEntity = + 
searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + _viewService = viewService; + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final QueryContext context = environment.getContext(); - final AutoCompleteMultipleInput input = bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final QueryContext context = environment.getContext(); + final AutoCompleteMultipleInput input = + bindArgument(environment.getArgument("input"), AutoCompleteMultipleInput.class); - if (isBlank(input.getQuery())) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) + if (isBlank(input.getQuery())) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? 
resolveView( + _viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) : null; - List types = getEntityTypes(input.getTypes(), maybeResolvedView); - if (types != null && types.size() > 0) { - return AutocompleteUtils.batchGetAutocompleteResults( - types.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); - } - - // By default, autocomplete only against the Default Set of Autocomplete entities - return AutocompleteUtils.batchGetAutocompleteResults( - AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), - sanitizedQuery, - input, - environment, - maybeResolvedView); + List types = getEntityTypes(input.getTypes(), maybeResolvedView); + if (types != null && types.size() > 0) { + return AutocompleteUtils.batchGetAutocompleteResults( + types.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); } - /** - * Gets the intersection of provided input types and types on the view applied (if any) - */ - @Nullable - List getEntityTypes(final @Nullable List inputTypes, final @Nullable DataHubViewInfo maybeResolvedView) { - List types = inputTypes; - if (maybeResolvedView != null) { - List inputEntityTypes = types != null ? 
types : new ArrayList<>(); - final List inputEntityNames = inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - List stringEntityTypes = SearchUtils.intersectEntityTypes(inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); + // By default, autocomplete only against the Default Set of Autocomplete entities + return AutocompleteUtils.batchGetAutocompleteResults( + AUTO_COMPLETE_ENTITY_TYPES.stream().map(_typeToEntity::get).collect(Collectors.toList()), + sanitizedQuery, + input, + environment, + maybeResolvedView); + } - types = stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); - } + /** Gets the intersection of provided input types and types on the view applied (if any) */ + @Nullable + List getEntityTypes( + final @Nullable List inputTypes, + final @Nullable DataHubViewInfo maybeResolvedView) { + List types = inputTypes; + if (maybeResolvedView != null) { + List inputEntityTypes = types != null ? 
types : new ArrayList<>(); + final List inputEntityNames = + inputEntityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + List stringEntityTypes = + SearchUtils.intersectEntityTypes( + inputEntityNames, maybeResolvedView.getDefinition().getEntityTypes()); - return types; + types = + stringEntityTypes.stream().map(EntityTypeMapper::getType).collect(Collectors.toList()); } + + return types; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java index e13545aadc5167..235f5f8d278995 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteResolver.java @@ -1,90 +1,94 @@ package com.linkedin.datahub.graphql.resolvers.search; -import com.linkedin.datahub.graphql.types.SearchableEntityType; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static org.apache.commons.lang3.StringUtils.isBlank; + import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.AutoCompleteInput; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; - -import javax.annotation.Nonnull; import java.util.List; import java.util.Map; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nonnull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static org.apache.commons.lang3.StringUtils.isBlank; - -/** - * Resolver responsible for resolving the 'autocomplete' field of the Query type - */ +/** Resolver responsible for resolving the 'autocomplete' field of the Query type */ public class AutoCompleteResolver implements DataFetcher> { - private static final int DEFAULT_LIMIT = 5; + private static final int DEFAULT_LIMIT = 5; - private static final Logger _logger = LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); + private static final Logger _logger = + LoggerFactory.getLogger(AutoCompleteResolver.class.getName()); - private final Map> _typeToEntity; + private final Map> _typeToEntity; - public AutoCompleteResolver(@Nonnull final List> searchableEntities) { - _typeToEntity = searchableEntities.stream().collect(Collectors.toMap( - SearchableEntityType::type, - entity -> entity - )); - } + public AutoCompleteResolver(@Nonnull final List> searchableEntities) { + _typeToEntity = + searchableEntities.stream() + .collect(Collectors.toMap(SearchableEntityType::type, entity -> entity)); + } - @Override - public CompletableFuture get(DataFetchingEnvironment environment) { - final AutoCompleteInput input = bindArgument(environment.getArgument("input"), AutoCompleteInput.class); + @Override + public CompletableFuture get(DataFetchingEnvironment environment) { + final AutoCompleteInput input = + bindArgument(environment.getArgument("input"), AutoCompleteInput.class); - // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = ResolverUtils.escapeForwardSlash(input.getQuery()); - if (isBlank(sanitizedQuery)) { - _logger.error("'query' parameter was null or empty"); - throw new ValidationException("'query' parameter can not be null or empty"); - } + // escape forward slash since it is a reserved character in Elasticsearch + final String sanitizedQuery = 
ResolverUtils.escapeForwardSlash(input.getQuery()); + if (isBlank(sanitizedQuery)) { + _logger.error("'query' parameter was null or empty"); + throw new ValidationException("'query' parameter can not be null or empty"); + } - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - return CompletableFuture.supplyAsync(() -> { - try { - _logger.debug("Executing autocomplete. " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; + return CompletableFuture.supplyAsync( + () -> { + try { + _logger.debug( + "Executing autocomplete. " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), input.getLimit())); - return _typeToEntity.get(input.getType()).autoComplete( - sanitizedQuery, - input.getField(), - filter, - limit, - environment.getContext() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", + return _typeToEntity + .get(input.getType()) + .autoComplete( + sanitizedQuery, input.getField(), filter, limit, environment.getContext()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", input.getType(), input.getField(), input.getQuery(), input.getFilters(), - input.getLimit()) + " " - + e.getMessage()); - throw new RuntimeException("Failed to execute autocomplete: " - + String.format("entity type %s, field %s, query %s, filters: %s, limit: %s", - input.getType(), - input.getField(), - input.getQuery(), - input.getFilters(), - 
input.getLimit()), e); - } - }); - } + input.getLimit()) + + " " + + e.getMessage()); + throw new RuntimeException( + "Failed to execute autocomplete: " + + String.format( + "entity type %s, field %s, query %s, filters: %s, limit: %s", + input.getType(), + input.getField(), + input.getQuery(), + input.getFilters(), + input.getLimit()), + e); + } + }); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java index 40722211de8d3a..9cd860781c0d68 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/AutocompleteUtils.java @@ -14,69 +14,81 @@ import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import javax.annotation.Nullable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import javax.annotation.Nullable; - - public class AutocompleteUtils { private static final Logger _logger = LoggerFactory.getLogger(AutocompleteUtils.class.getName()); private static final int DEFAULT_LIMIT = 5; - private AutocompleteUtils() { } + private AutocompleteUtils() {} public static CompletableFuture batchGetAutocompleteResults( List> entities, String sanitizedQuery, AutoCompleteMultipleInput input, DataFetchingEnvironment environment, - @Nullable DataHubViewInfo view - ) { + @Nullable DataHubViewInfo view) { final int limit = input.getLimit() != null ? input.getLimit() : DEFAULT_LIMIT; - final List> autoCompletesFuture = entities.stream().map(entity -> CompletableFuture.supplyAsync(() -> { - final Filter filter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - final Filter finalFilter = view != null - ? 
SearchUtils.combineFilters(filter, view.getDefinition().getFilter()) - : filter; + final List> autoCompletesFuture = + entities.stream() + .map( + entity -> + CompletableFuture.supplyAsync( + () -> { + final Filter filter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + final Filter finalFilter = + view != null + ? SearchUtils.combineFilters( + filter, view.getDefinition().getFilter()) + : filter; - try { - final AutoCompleteResults searchResult = entity.autoComplete( - sanitizedQuery, - input.getField(), - finalFilter, - limit, - environment.getContext() - ); - return new AutoCompleteResultForEntity( - entity.type(), - searchResult.getSuggestions(), - searchResult.getEntities() - ); - } catch (Exception e) { - _logger.error("Failed to execute autocomplete all: " - + String.format("field %s, query %s, filters: %s, limit: %s", - input.getField(), - input.getQuery(), - filter, - input.getLimit()), e); - return new AutoCompleteResultForEntity(entity.type(), Collections.emptyList(), Collections.emptyList()); - } - })).collect(Collectors.toList()); + try { + final AutoCompleteResults searchResult = + entity.autoComplete( + sanitizedQuery, + input.getField(), + finalFilter, + limit, + environment.getContext()); + return new AutoCompleteResultForEntity( + entity.type(), + searchResult.getSuggestions(), + searchResult.getEntities()); + } catch (Exception e) { + _logger.error( + "Failed to execute autocomplete all: " + + String.format( + "field %s, query %s, filters: %s, limit: %s", + input.getField(), + input.getQuery(), + filter, + input.getLimit()), + e); + return new AutoCompleteResultForEntity( + entity.type(), Collections.emptyList(), Collections.emptyList()); + } + })) + .collect(Collectors.toList()); return CompletableFuture.allOf(autoCompletesFuture.toArray(new CompletableFuture[0])) - .thenApplyAsync((res) -> { - AutoCompleteMultipleResults result = new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); - List suggestions = 
autoCompletesFuture.stream() - .map(CompletableFuture::join) - .filter( + .thenApplyAsync( + (res) -> { + AutoCompleteMultipleResults result = + new AutoCompleteMultipleResults(sanitizedQuery, new ArrayList<>()); + List suggestions = + autoCompletesFuture.stream() + .map(CompletableFuture::join) + .filter( autoCompleteResultForEntity -> - autoCompleteResultForEntity.getSuggestions() != null && autoCompleteResultForEntity.getSuggestions().size() > 0 - ) - .collect(Collectors.toList()); - result.setSuggestions(suggestions); - return result; - }); + autoCompleteResultForEntity.getSuggestions() != null + && autoCompleteResultForEntity.getSuggestions().size() > 0) + .collect(Collectors.toList()); + result.setSuggestions(suggestions); + return result; + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java index 17058fd8d7cffb..e54955e1857f09 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.Entity; @@ -18,26 +23,20 @@ import com.linkedin.view.DataHubViewInfo; import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; -import 
lombok.RequiredArgsConstructor; -import lombok.extern.slf4j.Slf4j; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.resolveView; - +import javax.annotation.Nonnull; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; @Slf4j @RequiredArgsConstructor -public class GetQuickFiltersResolver implements DataFetcher> { +public class GetQuickFiltersResolver + implements DataFetcher> { private final EntityClient _entityClient; private final ViewService _viewService; @@ -47,41 +46,51 @@ public class GetQuickFiltersResolver implements DataFetcher get(final DataFetchingEnvironment environment) throws Exception { - final GetQuickFiltersInput input = bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); - - return CompletableFuture.supplyAsync(() -> { - final GetQuickFiltersResult result = new GetQuickFiltersResult(); - final List quickFilters = new ArrayList<>(); - - try { - final SearchResult searchResult = getSearchResults(ResolverUtils.getAuthentication(environment), input); - final AggregationMetadataArray aggregations = searchResult.getMetadata().getAggregations(); - - quickFilters.addAll(getPlatformQuickFilters(aggregations)); - quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); - } catch (Exception e) { - log.error("Failed getting quick filters", e); - throw new RuntimeException("Failed to to get quick filters", e); - } - - result.setQuickFilters(quickFilters); - return result; - }); + public CompletableFuture get(final 
DataFetchingEnvironment environment) + throws Exception { + final GetQuickFiltersInput input = + bindArgument(environment.getArgument("input"), GetQuickFiltersInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final GetQuickFiltersResult result = new GetQuickFiltersResult(); + final List quickFilters = new ArrayList<>(); + + try { + final SearchResult searchResult = + getSearchResults(ResolverUtils.getAuthentication(environment), input); + final AggregationMetadataArray aggregations = + searchResult.getMetadata().getAggregations(); + + quickFilters.addAll(getPlatformQuickFilters(aggregations)); + quickFilters.addAll(getEntityTypeQuickFilters(aggregations)); + } catch (Exception e) { + log.error("Failed getting quick filters", e); + throw new RuntimeException("Failed to to get quick filters", e); + } + + result.setQuickFilters(quickFilters); + return result; + }); } - /** - * Do a star search with view filter applied to get info about all data in this instance. - */ - private SearchResult getSearchResults(@Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) throws Exception { - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) - : null; - final List entityNames = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + /** Do a star search with view filter applied to get info about all data in this instance. */ + private SearchResult getSearchResults( + @Nonnull final Authentication authentication, @Nonnull final GetQuickFiltersInput input) + throws Exception { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? 
resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), authentication) + : null; + final List entityNames = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); return _entityClient.searchAcrossEntities( maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) : entityNames, "*", maybeResolvedView != null @@ -95,67 +104,88 @@ private SearchResult getSearchResults(@Nonnull final Authentication authenticati } /** - * Get platforms and their count from an aggregations array, sorts by entity count, and map the top 5 to quick filters + * Get platforms and their count from an aggregations array, sorts by entity count, and map the + * top 5 to quick filters */ - private List getPlatformQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List getPlatformQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List platforms = new ArrayList<>(); - final Optional platformAggregations = aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); + final Optional platformAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(PLATFORM)).findFirst(); if (platformAggregations.isPresent()) { final List sortedPlatforms = - platformAggregations.get().getFilterValues().stream().sorted(Comparator.comparingLong(val -> -val.getFacetCount())).collect(Collectors.toList()); - sortedPlatforms.forEach(platformFilter -> { - if (platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { - platforms.add(mapQuickFilter(PLATFORM, platformFilter)); - } - }); + platformAggregations.get().getFilterValues().stream() + .sorted(Comparator.comparingLong(val -> -val.getFacetCount())) + .collect(Collectors.toList()); + sortedPlatforms.forEach( + platformFilter -> { + if 
(platforms.size() < PLATFORM_COUNT && platformFilter.getFacetCount() > 0) { + platforms.add(mapQuickFilter(PLATFORM, platformFilter)); + } + }); } // return platforms sorted alphabetically by their name - return platforms.stream().sorted(Comparator.comparing(QuickFilter::getValue)).collect(Collectors.toList()); + return platforms.stream() + .sorted(Comparator.comparing(QuickFilter::getValue)) + .collect(Collectors.toList()); } /** - * Gets entity type quick filters from search aggregations. First, get source entity type quick filters - * from a prioritized list. Do the same for datathub entity types. + * Gets entity type quick filters from search aggregations. First, get source entity type quick + * filters from a prioritized list. Do the same for datathub entity types. */ - private List getEntityTypeQuickFilters(@Nonnull final AggregationMetadataArray aggregations) { + private List getEntityTypeQuickFilters( + @Nonnull final AggregationMetadataArray aggregations) { final List entityTypes = new ArrayList<>(); - final Optional entityAggregations = aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); + final Optional entityAggregations = + aggregations.stream().filter(agg -> agg.getName().equals(ENTITY_FILTER_NAME)).findFirst(); if (entityAggregations.isPresent()) { final List sourceEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, SOURCE_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_SOURCE_ENTITY_TYPES, + SOURCE_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(sourceEntityTypeFilters); final List dataHubEntityTypeFilters = - getQuickFiltersFromList(SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, DATAHUB_ENTITY_COUNT, entityAggregations.get()); + getQuickFiltersFromList( + SearchUtils.PRIORITIZED_DATAHUB_ENTITY_TYPES, + DATAHUB_ENTITY_COUNT, + entityAggregations.get()); entityTypes.addAll(dataHubEntityTypeFilters); } return 
entityTypes; } /** - * Create a quick filters list by looping over prioritized list and adding filters that exist until we reach the maxListSize defined + * Create a quick filters list by looping over prioritized list and adding filters that exist + * until we reach the maxListSize defined */ private List getQuickFiltersFromList( @Nonnull final List prioritizedList, final int maxListSize, - @Nonnull final AggregationMetadata entityAggregations - ) { + @Nonnull final AggregationMetadata entityAggregations) { final List entityTypes = new ArrayList<>(); - prioritizedList.forEach(entityType -> { - if (entityTypes.size() < maxListSize) { - final Optional entityFilter = entityAggregations.getFilterValues().stream().filter(val -> val.getValue().equals(entityType)).findFirst(); - if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { - entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); - } - } - }); + prioritizedList.forEach( + entityType -> { + if (entityTypes.size() < maxListSize) { + final Optional entityFilter = + entityAggregations.getFilterValues().stream() + .filter(val -> val.getValue().equals(entityType)) + .findFirst(); + if (entityFilter.isPresent() && entityFilter.get().getFacetCount() > 0) { + entityTypes.add(mapQuickFilter(ENTITY_FILTER_NAME, entityFilter.get())); + } + } + }); return entityTypes; } - private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final FilterValue filterValue) { + private QuickFilter mapQuickFilter( + @Nonnull final String field, @Nonnull final FilterValue filterValue) { final boolean isEntityTypeFilter = field.equals(ENTITY_FILTER_NAME); final QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); @@ -167,9 +197,7 @@ private QuickFilter mapQuickFilter(@Nonnull final String field, @Nonnull final F return quickFilter; } - /** - * If we're working with an entity type filter, we need to convert the value to an EntityType - */ + /** If we're working with an 
entity type filter, we need to convert the value to an EntityType */ public static String convertFilterValue(String filterValue, boolean isEntityType) { if (isEntityType) { return EntityTypeMapper.getType(filterValue).toString(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java index d576ffc8ca2807..742d1d170de64b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -24,13 +27,7 @@ import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossEntitiesResolver implements DataFetcher> { @@ -48,57 +45,80 @@ public CompletableFuture get(DataFetchingEnvironment environment) bindArgument(environment.getArgument("input"), ScrollAcrossEntitiesInput.class); final List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? 
SEARCHABLE_ENTITY_TYPES : input.getTypes(); - final List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + final List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - // escape forward slash since it is a reserved character in Elasticsearch, default to * if blank/empty - final String sanitizedQuery = StringUtils.isNotBlank(input.getQuery()) - ? ResolverUtils.escapeForwardSlash(input.getQuery()) : "*"; + // escape forward slash since it is a reserved character in Elasticsearch, default to * if + // blank/empty + final String sanitizedQuery = + StringUtils.isNotBlank(input.getQuery()) + ? ResolverUtils.escapeForwardSlash(input.getQuery()) + : "*"; - @Nullable - final String scrollId = input.getScrollId(); + @Nullable final String scrollId = input.getScrollId(); final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? 
resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; - final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - } + final Filter baseFilter = ResolverUtils.buildFilter(null, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + } - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, scrollId: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; - return UrnScrollResultsMapper.map(_entityClient.scrollAcrossEntities( - maybeResolvedView != null - ? SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? 
SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - scrollId, - keepAlive, - count, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), e); - } - }); + return UrnScrollResultsMapper.map( + _entityClient.scrollAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + scrollId, + keepAlive, + count, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, searchAfter: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + scrollId, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), scrollId, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java index 78be1ac3096908..adab62c22bb724 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/ScrollAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.AndFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class ScrollAcrossLineageResolver @@ -53,55 +50,98 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? 
ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final String scrollId = input.getScrollId() != null ? input.getScrollId() : null; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); - final List facetFilters = filters.stream() - .map(AndFilterInput::getAnd) - .flatMap(List::stream) - .collect(Collectors.toList()); + final List filters = + input.getOrFilters() != null ? input.getOrFilters() : new ArrayList<>(); + final List facetFilters = + filters.stream() + .map(AndFilterInput::getAnd) + .flatMap(List::stream) + .collect(Collectors.toList()); final Integer maxHops = getMaxHops(facetFilters); String keepAlive = input.getKeepAlive() != null ? input.getKeepAlive() : "5m"; @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ? 
null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); - SearchFlags searchFlags = null; - final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = new SearchFlags() - .setSkipCache(inputFlags.getSkipCache()) - .setFulltext(inputFlags.getFulltext()) - .setMaxAggValues(inputFlags.getMaxAggValues()); - } - return UrnScrollAcrossLineageResultsMapper.map( - _entityClient.scrollAcrossLineage(urn, resolvedDirection, entityNames, sanitizedQuery, - maxHops, ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), null, scrollId, - keepAlive, count, startTimeMillis, endTimeMillis, searchFlags, ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, scrollId, count); - throw new RuntimeException("Failed to execute scroll across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, 
scrollId, count), e); - } - }); + SearchFlags searchFlags = null; + final com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = + input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = + new SearchFlags() + .setSkipCache(inputFlags.getSkipCache()) + .setFulltext(inputFlags.getFulltext()) + .setMaxAggValues(inputFlags.getMaxAggValues()); + } + return UrnScrollAcrossLineageResultsMapper.map( + _entityClient.scrollAcrossLineage( + urn, + resolvedDirection, + entityNames, + sanitizedQuery, + maxHops, + ResolverUtils.buildFilter(facetFilters, input.getOrFilters()), + null, + scrollId, + keepAlive, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute scroll across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count); + throw new RuntimeException( + "Failed to execute scroll across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + scrollId, + count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index 1022b25b3cd992..f8178e3b396cb5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.SearchAcrossEntitiesInput; @@ -19,13 +22,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossEntitiesResolver implements DataFetcher> { @@ -50,43 +47,65 @@ public CompletableFuture get(DataFetchingEnvironment environment) final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - return CompletableFuture.supplyAsync(() -> { - - final DataHubViewInfo maybeResolvedView = (input.getViewUrn() != null) - ? resolveView(_viewService, UrnUtils.getUrn(input.getViewUrn()), context.getAuthentication()) - : null; - - final Filter baseFilter = ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); - - SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); - SortCriterion sortCriterion = input.getSortInput() != null ? mapSortCriterion(input.getSortInput().getSortCriterion()) : null; - - try { - log.debug( - "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - - return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities( - maybeResolvedView != null - ? 
SearchUtils.intersectEntityTypes(entityNames, maybeResolvedView.getDefinition().getEntityTypes()) - : entityNames, - sanitizedQuery, - maybeResolvedView != null - ? SearchUtils.combineFilters(baseFilter, maybeResolvedView.getDefinition().getFilter()) - : baseFilter, - start, - count, - searchFlags, - sortCriterion, - ResolverUtils.getAuthentication(environment))); - } catch (Exception e) { - log.error( - "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity types %s, query %s, filters: %s, start: %s, count: %s", - input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + final DataHubViewInfo maybeResolvedView = + (input.getViewUrn() != null) + ? resolveView( + _viewService, + UrnUtils.getUrn(input.getViewUrn()), + context.getAuthentication()) + : null; + + final Filter baseFilter = + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()); + + SearchFlags searchFlags = mapInputFlags(input.getSearchFlags()); + SortCriterion sortCriterion = + input.getSortInput() != null + ? mapSortCriterion(input.getSortInput().getSortCriterion()) + : null; + + try { + log.debug( + "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + + return UrnSearchResultsMapper.map( + _entityClient.searchAcrossEntities( + maybeResolvedView != null + ? SearchUtils.intersectEntityTypes( + entityNames, maybeResolvedView.getDefinition().getEntityTypes()) + : entityNames, + sanitizedQuery, + maybeResolvedView != null + ? 
SearchUtils.combineFilters( + baseFilter, maybeResolvedView.getDefinition().getFilter()) + : baseFilter, + start, + count, + searchFlags, + sortCriterion, + ResolverUtils.getAuthentication(environment))); + } catch (Exception e) { + log.error( + "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", + input.getTypes(), + input.getQuery(), + input.getOrFilters(), + start, + count); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity types %s, query %s, filters: %s, start: %s, count: %s", + input.getTypes(), input.getQuery(), input.getOrFilters(), start, count), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index 9f489183f4af72..0f5d2d90ba0c29 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; @@ -25,13 +28,7 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - -/** - * Resolver responsible for resolving 'searchAcrossEntities' field of the Query type - */ +/** Resolver responsible for resolving 'searchAcrossEntities' field of 
the Query type */ @Slf4j @RequiredArgsConstructor public class SearchAcrossLineageResolver @@ -54,76 +51,95 @@ public CompletableFuture get(DataFetchingEnvironment final LineageDirection lineageDirection = input.getDirection(); List entityTypes = - (input.getTypes() == null || input.getTypes().isEmpty()) ? SEARCHABLE_ENTITY_TYPES : input.getTypes(); - List entityNames = entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); + (input.getTypes() == null || input.getTypes().isEmpty()) + ? SEARCHABLE_ENTITY_TYPES + : input.getTypes(); + List entityNames = + entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); // escape forward slash since it is a reserved character in Elasticsearch - final String sanitizedQuery = input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; + final String sanitizedQuery = + input.getQuery() != null ? ResolverUtils.escapeForwardSlash(input.getQuery()) : null; final int start = input.getStart() != null ? input.getStart() : DEFAULT_START; final int count = input.getCount() != null ? input.getCount() : DEFAULT_COUNT; - final List filters = input.getFilters() != null ? input.getFilters() : new ArrayList<>(); + final List filters = + input.getFilters() != null ? input.getFilters() : new ArrayList<>(); final Integer maxHops = getMaxHops(filters); @Nullable - final Long startTimeMillis = input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); + final Long startTimeMillis = + input.getStartTimeMillis() == null ? null : input.getStartTimeMillis(); @Nullable final Long endTimeMillis = input.getEndTimeMillis() == null ? 
null : input.getEndTimeMillis(); com.linkedin.metadata.graph.LineageDirection resolvedDirection = com.linkedin.metadata.graph.LineageDirection.valueOf(lineageDirection.toString()); - return CompletableFuture.supplyAsync(() -> { - try { - log.debug( - "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, - resolvedDirection, - input.getTypes(), - input.getQuery(), - filters, - start, - count); - - final Filter filter = - ResolverUtils.buildFilter( + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), filters, - input.getOrFilters()); - SearchFlags searchFlags = null; - com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); - if (inputFlags != null) { - searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); - if (inputFlags.getSkipHighlighting() == null) { - searchFlags.setSkipHighlighting(true); - } - } else { - searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); - } - - return UrnSearchAcrossLineageResultsMapper.map( - _entityClient.searchAcrossLineage( + start, + count); + + final Filter filter = ResolverUtils.buildFilter(filters, input.getOrFilters()); + SearchFlags searchFlags = null; + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags = input.getSearchFlags(); + if (inputFlags != null) { + searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); + if (inputFlags.getSkipHighlighting() == null) { + searchFlags.setSkipHighlighting(true); + } + } else { + searchFlags = new SearchFlags().setFulltext(true).setSkipHighlighting(true); + } + + return UrnSearchAcrossLineageResultsMapper.map( + _entityClient.searchAcrossLineage( + urn, + resolvedDirection, + entityNames, + 
sanitizedQuery, + maxHops, + filter, + null, + start, + count, + startTimeMillis, + endTimeMillis, + searchFlags, + ResolverUtils.getAuthentication(environment))); + } catch (RemoteInvocationException e) { + log.error( + "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", urn, resolvedDirection, - entityNames, - sanitizedQuery, - maxHops, - filter, - null, + input.getTypes(), + input.getQuery(), + filters, start, - count, - startTimeMillis, - endTimeMillis, - searchFlags, - ResolverUtils.getAuthentication(environment))); - } catch (RemoteInvocationException e) { - log.error( - "Failed to execute search across relationships: source urn {}, direction {}, entity types {}, query {}, filters: {}, start: {}, count: {}", - urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count); - throw new RuntimeException("Failed to execute search across relationships: " + String.format( - "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", urn, - resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count), e); - } finally { - log.debug("Returning from search across lineage resolver"); - } - }); + count); + throw new RuntimeException( + "Failed to execute search across relationships: " + + String.format( + "source urn %s, direction %s, entity types %s, query %s, filters: %s, start: %s, count: %s", + urn, + resolvedDirection, + input.getTypes(), + input.getQuery(), + filters, + start, + count), + e); + } finally { + log.debug("Returning from search across lineage resolver"); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 0e66d6e601399c..68214238879237 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; + import com.linkedin.datahub.graphql.generated.SearchInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; @@ -15,17 +18,12 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; -import static com.linkedin.metadata.search.utils.SearchUtils.applyDefaultSearchFlags; - - -/** - * Resolver responsible for resolving the 'search' field of the Query type - */ +/** Resolver responsible for resolving the 'search' field of the Query type */ @Slf4j @RequiredArgsConstructor public class SearchResolver implements DataFetcher> { - private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = new SearchFlags() + private static final SearchFlags SEARCH_RESOLVER_DEFAULTS = + new SearchFlags() .setFulltext(true) .setMaxAggValues(20) .setSkipCache(false) @@ -54,22 +52,52 @@ public CompletableFuture get(DataFetchingEnvironment environment) searchFlags = applyDefaultSearchFlags(null, sanitizedQuery, SEARCH_RESOLVER_DEFAULTS); } - return CompletableFuture.supplyAsync(() -> { - try { - log.debug("Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); + return CompletableFuture.supplyAsync( + () -> { + try { + log.debug( + "Executing search. 
entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); - return UrnSearchResultsMapper.map( - _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), - input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment), + return UrnSearchResultsMapper.map( + _entityClient.search( + entityName, + sanitizedQuery, + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), + null, + start, + count, + ResolverUtils.getAuthentication(environment), searchFlags)); - } catch (Exception e) { - log.error("Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags); - throw new RuntimeException( - "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", - input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count, searchFlags), e); - } - }); + } catch (Exception e) { + log.error( + "Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}, searchFlags: {}", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags); + throw new RuntimeException( + "Failed to execute search: " + + String.format( + "entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s, searchFlags: %s", + input.getType(), + input.getQuery(), + input.getFilters(), + input.getOrFilters(), + start, + count, + searchFlags), + e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java index fb146ef72877d1..444ab4bcc3c3c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtils.java @@ -1,5 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; +import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -28,31 +44,11 @@ import lombok.extern.slf4j.Slf4j; import org.codehaus.plexus.util.CollectionUtils; -import static com.linkedin.metadata.Constants.CHART_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.CORP_GROUP_ENTITY_NAME; -import static 
com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DASHBOARD_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATASET_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_FLOW_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_JOB_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DOMAIN_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_FEATURE_TABLE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_MODEL_GROUP_ENTITY_NAME; -import static com.linkedin.metadata.Constants.ML_PRIMARY_KEY_ENTITY_NAME; - - @Slf4j public class SearchUtils { - private SearchUtils() { - } + private SearchUtils() {} - /** - * Entities that are searched by default in Search Across Entities - */ + /** Entities that are searched by default in Search Across Entities */ public static final List SEARCHABLE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -76,10 +72,7 @@ private SearchUtils() { EntityType.DATA_PRODUCT, EntityType.NOTEBOOK); - - /** - * Entities that are part of autocomplete by default in Auto Complete Across Entities - */ + /** Entities that are part of autocomplete by default in Auto Complete Across Entities */ public static final List AUTO_COMPLETE_ENTITY_TYPES = ImmutableList.of( EntityType.DATASET, @@ -99,63 +92,78 @@ private SearchUtils() { EntityType.NOTEBOOK, EntityType.DATA_PRODUCT); - /** - * A prioritized list of source filter types used to generate quick filters - */ - public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = Stream.of( - DATASET_ENTITY_NAME, - DASHBOARD_ENTITY_NAME, - DATA_FLOW_ENTITY_NAME, - DATA_JOB_ENTITY_NAME, - CHART_ENTITY_NAME, - CONTAINER_ENTITY_NAME, - ML_MODEL_ENTITY_NAME, - ML_MODEL_GROUP_ENTITY_NAME, - 
ML_FEATURE_ENTITY_NAME, - ML_FEATURE_TABLE_ENTITY_NAME, - ML_PRIMARY_KEY_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** Entities that are part of browse by default */ + public static final List BROWSE_ENTITY_TYPES = + ImmutableList.of( + EntityType.DATASET, + EntityType.DASHBOARD, + EntityType.CHART, + EntityType.CONTAINER, + EntityType.MLMODEL, + EntityType.MLMODEL_GROUP, + EntityType.MLFEATURE_TABLE, + EntityType.DATA_FLOW, + EntityType.DATA_JOB, + EntityType.NOTEBOOK); - /** - * A prioritized list of DataHub filter types used to generate quick filters - */ - public static final List PRIORITIZED_DATAHUB_ENTITY_TYPES = Stream.of( - DOMAIN_ENTITY_NAME, - GLOSSARY_TERM_ENTITY_NAME, - CORP_GROUP_ENTITY_NAME, - CORP_USER_ENTITY_NAME - ).map(String::toLowerCase).collect(Collectors.toList()); + /** A prioritized list of source filter types used to generate quick filters */ + public static final List PRIORITIZED_SOURCE_ENTITY_TYPES = + Stream.of( + DATASET_ENTITY_NAME, + DASHBOARD_ENTITY_NAME, + DATA_FLOW_ENTITY_NAME, + DATA_JOB_ENTITY_NAME, + CHART_ENTITY_NAME, + CONTAINER_ENTITY_NAME, + ML_MODEL_ENTITY_NAME, + ML_MODEL_GROUP_ENTITY_NAME, + ML_FEATURE_ENTITY_NAME, + ML_FEATURE_TABLE_ENTITY_NAME, + ML_PRIMARY_KEY_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); + + /** A prioritized list of DataHub filter types used to generate quick filters */ + public static final List PRIORITIZED_DATAHUB_ENTITY_TYPES = + Stream.of( + DOMAIN_ENTITY_NAME, + GLOSSARY_TERM_ENTITY_NAME, + CORP_GROUP_ENTITY_NAME, + CORP_USER_ENTITY_NAME) + .map(String::toLowerCase) + .collect(Collectors.toList()); /** - * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link Filter} - * in disjunctive normal form. + * Combines two {@link Filter} instances in a conjunction and returns a new instance of {@link + * Filter} in disjunctive normal form. 
* * @param baseFilter the filter to apply the view to * @param viewFilter the view filter, null if it doesn't exist - * * @return a new instance of {@link Filter} representing the applied view. */ @Nonnull - public static Filter combineFilters(@Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { - final Filter finalBaseFilter = baseFilter == null - ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) - : baseFilter; + public static Filter combineFilters( + @Nullable final Filter baseFilter, @Nonnull final Filter viewFilter) { + final Filter finalBaseFilter = + baseFilter == null + ? new Filter().setOr(new ConjunctiveCriterionArray(Collections.emptyList())) + : baseFilter; // Join the filter conditions in Disjunctive Normal Form. return combineFiltersInConjunction(finalBaseFilter, viewFilter); } /** - * Returns the intersection of two sets of entity types. (Really just string lists). - * If either is empty, consider the entity types list to mean "all" (take the other set). + * Returns the intersection of two sets of entity types. (Really just string lists). If either is + * empty, consider the entity types list to mean "all" (take the other set). * * @param baseEntityTypes the entity types to apply the view to * @param viewEntityTypes the view info, null if it doesn't exist - * * @return the intersection of the two input sets */ @Nonnull - public static List intersectEntityTypes(@Nonnull final List baseEntityTypes, @Nonnull final List viewEntityTypes) { + public static List intersectEntityTypes( + @Nonnull final List baseEntityTypes, @Nonnull final List viewEntityTypes) { if (baseEntityTypes.isEmpty()) { return viewEntityTypes; } @@ -171,126 +179,29 @@ public static List intersectEntityTypes(@Nonnull final List base * * @param filter1 the first filter in the pair * @param filter2 the second filter in the pair - * - * This method supports either Filter format, where the "or" field is used, instead - * of criteria. 
If the criteria filter is used, then it will be converted into an "OR" before - * returning the new filter. - * + *

This method supports either Filter format, where the "or" field is used, instead of + * criteria. If the criteria filter is used, then it will be converted into an "OR" before + * returning the new filter. * @return the result of joining the 2 filters in a conjunction (AND) - * - * How does it work? It basically cross-products the conjunctions inside of each Filter clause. - * - * Example Inputs: - * filter1 -> - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * } - * ] - * } - * ] - * } - * filter2 -> - * { - * or: [ - * { - * and: [ - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * }, - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term2"] - * } - * ] - * } - * ] - * } - * Example Output: - * { - * or: [ - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: tags, - * condition: EQUAL, - * values: ["urn:li:tag:tag"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: domain, - * condition: EQUAL, - * values: ["urn:li:domain:domain"] - * } - * ] - * }, - * { - * and: [ - * { - * field: glossaryTerm, - * condition: EQUAL, - * values: ["urn:li:glossaryTerm:term"] - * }, - * { - * field: glossaryTerms, - * condition: EQUAL, - * values: ["urn:li:glosaryTerm:term2"] - * } - * ] - * }, - * ] - * } + *

How does it work? It basically cross-products the conjunctions inside of each Filter + * clause. + *

Example Inputs: filter1 -> { or: [ { and: [ { field: tags, condition: EQUAL, values: + * ["urn:li:tag:tag"] } ] }, { and: [ { field: glossaryTerms, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term"] } ] } ] } filter2 -> { or: [ { and: [ { field: domain, + * condition: EQUAL, values: ["urn:li:domain:domain"] }, ] }, { and: [ { field: glossaryTerms, + * condition: EQUAL, values: ["urn:li:glossaryTerm:term2"] } ] } ] } Example Output: { or: [ { + * and: [ { field: tags, condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: domain, + * condition: EQUAL, values: ["urn:li:domain:domain"] } ] }, { and: [ { field: tags, + * condition: EQUAL, values: ["urn:li:tag:tag"] }, { field: glossaryTerms, condition: EQUAL, + * values: ["urn:li:glosaryTerm:term2"] } ] }, { and: [ { field: glossaryTerm, condition: + * EQUAL, values: ["urn:li:glossaryTerm:term"] }, { field: domain, condition: EQUAL, values: + * ["urn:li:domain:domain"] } ] }, { and: [ { field: glossaryTerm, condition: EQUAL, values: + * ["urn:li:glossaryTerm:term"] }, { field: glossaryTerms, condition: EQUAL, values: + * ["urn:li:glosaryTerm:term2"] } ] }, ] } */ @Nonnull - private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, @Nonnull final Filter filter2) { + private static Filter combineFiltersInConjunction( + @Nonnull final Filter filter1, @Nonnull final Filter filter2) { final Filter finalFilter1 = convertToV2Filter(filter1); final Filter finalFilter2 = convertToV2Filter(filter2); @@ -310,7 +221,8 @@ private static Filter combineFiltersInConjunction(@Nonnull final Filter filter1, for (ConjunctiveCriterion conjunction2 : finalFilter2.getOr()) { final List joinedCriterion = new ArrayList<>(conjunction1.getAnd()); joinedCriterion.addAll(conjunction2.getAnd()); - ConjunctiveCriterion newConjunction = new ConjunctiveCriterion().setAnd(new CriterionArray(joinedCriterion)); + ConjunctiveCriterion newConjunction = + new ConjunctiveCriterion().setAnd(new 
CriterionArray(joinedCriterion)); newDisjunction.add(newConjunction); } } @@ -325,38 +237,45 @@ private static Filter convertToV2Filter(@Nonnull Filter filter) { } else if (filter.hasCriteria()) { // Convert criteria to an OR return new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(filter.getCriteria()) - ))); + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(filter.getCriteria())))); } throw new IllegalArgumentException( - String.format("Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", filter)); + String.format( + "Illegal filter provided! Neither 'or' nor 'criteria' fields were populated for filter %s", + filter)); } /** - * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the specified - * urn cannot be found. + * Attempts to resolve a View by urn. Throws {@link IllegalArgumentException} if a View with the + * specified urn cannot be found. */ - public static DataHubViewInfo resolveView(@Nonnull ViewService viewService, @Nonnull final Urn viewUrn, + public static DataHubViewInfo resolveView( + @Nonnull ViewService viewService, + @Nonnull final Urn viewUrn, @Nonnull final Authentication authentication) { try { DataHubViewInfo maybeViewInfo = viewService.getViewInfo(viewUrn, authentication); if (maybeViewInfo == null) { - log.warn(String.format("Failed to resolve View with urn %s. View does not exist!", viewUrn)); + log.warn( + String.format("Failed to resolve View with urn %s. 
View does not exist!", viewUrn)); } return maybeViewInfo; } catch (Exception e) { - throw new RuntimeException(String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), e); + throw new RuntimeException( + String.format("Caught exception while attempting to resolve View with URN %s", viewUrn), + e); } } // Assumption is that filter values for degree are either null, 3+, 2, or 1. public static Integer getMaxHops(List filters) { - Set degreeFilterValues = filters.stream() - .filter(filter -> filter.getField().equals("degree")) - .flatMap(filter -> filter.getValues().stream()) - .collect(Collectors.toSet()); + Set degreeFilterValues = + filters.stream() + .filter(filter -> filter.getField().equals("degree")) + .flatMap(filter -> filter.getValues().stream()) + .collect(Collectors.toSet()); Integer maxHops = null; if (!degreeFilterValues.contains("3+")) { if (degreeFilterValues.contains("2")) { @@ -368,7 +287,8 @@ public static Integer getMaxHops(List filters) { return maxHops; } - public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { + public static SearchFlags mapInputFlags( + com.linkedin.datahub.graphql.generated.SearchFlags inputFlags) { SearchFlags searchFlags = null; if (inputFlags != null) { searchFlags = SearchFlagsInputMapper.INSTANCE.apply(inputFlags); @@ -376,7 +296,8 @@ public static SearchFlags mapInputFlags(com.linkedin.datahub.graphql.generated.S return searchFlags; } - public static SortCriterion mapSortCriterion(com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { + public static SortCriterion mapSortCriterion( + com.linkedin.datahub.graphql.generated.SortCriterion sortCriterion) { SortCriterion result = new SortCriterion(); result.setField(sortCriterion.getField()); result.setOrder(SortOrder.valueOf(sortCriterion.getSortOrder().name())); @@ -388,4 +309,4 @@ public static List getEntityNames(List inputTypes) { (inputTypes == null || inputTypes.isEmpty()) ? 
SEARCHABLE_ENTITY_TYPES : inputTypes; return entityTypes.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java index 8c21277b66a69f..a7e0d93c7bd1e5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -16,58 +18,61 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating the authenticated user's View-specific settings. - */ +/** Resolver responsible for updating the authenticated user's View-specific settings. 
*/ @Slf4j @RequiredArgsConstructor -public class UpdateCorpUserViewsSettingsResolver implements DataFetcher> { +public class UpdateCorpUserViewsSettingsResolver + implements DataFetcher> { private final SettingsService _settingsService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final UpdateCorpUserViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); + final UpdateCorpUserViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateCorpUserViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - try { + return CompletableFuture.supplyAsync( + () -> { + try { - final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); + final Urn userUrn = UrnUtils.getUrn(context.getActorUrn()); - final CorpUserSettings maybeSettings = _settingsService.getCorpUserSettings( - userUrn, - context.getAuthentication() - ); + final CorpUserSettings maybeSettings = + _settingsService.getCorpUserSettings(userUrn, context.getAuthentication()); - final CorpUserSettings newSettings = maybeSettings == null - ? new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) - : maybeSettings; + final CorpUserSettings newSettings = + maybeSettings == null + ? new CorpUserSettings() + .setAppearance( + new CorpUserAppearanceSettings().setShowSimplifiedHomepage(false)) + : maybeSettings; - // Patch the new corp user settings. This does a R-M-F. - updateCorpUserSettings(newSettings, input); + // Patch the new corp user settings. This does a R-M-F. 
+ updateCorpUserSettings(newSettings, input); - _settingsService.updateCorpUserSettings( - userUrn, - newSettings, - context.getAuthentication() - ); - return true; - } catch (Exception e) { - log.error("Failed to perform user view settings update against input {}, {}", input.toString(), e.getMessage()); - throw new RuntimeException(String.format("Failed to perform update to user view settings against input %s", input.toString()), e); - } - }); + _settingsService.updateCorpUserSettings( + userUrn, newSettings, context.getAuthentication()); + return true; + } catch (Exception e) { + log.error( + "Failed to perform user view settings update against input {}, {}", + input.toString(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to perform update to user view settings against input %s", + input.toString()), + e); + } + }); } private static void updateCorpUserSettings( @Nonnull final CorpUserSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - final CorpUserViewsSettings newViewSettings = settings.hasViews() - ? settings.getViews() - : new CorpUserViewsSettings(); + final CorpUserViewsSettings newViewSettings = + settings.hasViews() ? settings.getViews() : new CorpUserViewsSettings(); updateCorpUserViewsSettings(newViewSettings, input); settings.setViews(newViewSettings); } @@ -75,9 +80,8 @@ private static void updateCorpUserSettings( private static void updateCorpUserViewsSettings( @Nonnull final CorpUserViewsSettings settings, @Nonnull final UpdateCorpUserViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java index f1aba3d9247c58..208e8717432699 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolver.java @@ -14,11 +14,11 @@ /** * Retrieves the Global Settings related to the Views feature. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + *

This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ @Slf4j -public class GlobalViewsSettingsResolver implements - DataFetcher> { +public class GlobalViewsSettingsResolver + implements DataFetcher> { private final SettingsService _settingsService; @@ -27,25 +27,29 @@ public GlobalViewsSettingsResolver(final SettingsService settingsService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - try { - final GlobalSettingsInfo globalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); - return globalSettings != null && globalSettings.hasViews() - ? mapGlobalViewsSettings(globalSettings.getViews()) - : new GlobalViewsSettings(); - } catch (Exception e) { - throw new RuntimeException("Failed to retrieve Global Views Settings", e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final GlobalSettingsInfo globalSettings = + _settingsService.getGlobalSettings(context.getAuthentication()); + return globalSettings != null && globalSettings.hasViews() + ? 
mapGlobalViewsSettings(globalSettings.getViews()) + : new GlobalViewsSettings(); + } catch (Exception e) { + throw new RuntimeException("Failed to retrieve Global Views Settings", e); + } + }); } - private static GlobalViewsSettings mapGlobalViewsSettings(@Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { + private static GlobalViewsSettings mapGlobalViewsSettings( + @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings) { final GlobalViewsSettings result = new GlobalViewsSettings(); if (settings.hasDefaultView()) { result.setDefaultView(settings.getDefaultView().toString()); } return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java index c90ec04b3a2dfc..7d37683785fc2a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,13 +17,10 @@ import java.util.concurrent.CompletableFuture; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /** * Resolver responsible for updating the Global Views settings. * - * This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. + *

This capability requires the 'MANAGE_GLOBAL_VIEWS' Platform Privilege. */ public class UpdateGlobalViewsSettingsResolver implements DataFetcher> { @@ -32,45 +31,50 @@ public UpdateGlobalViewsSettingsResolver(@Nonnull final SettingsService settings } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final UpdateGlobalViewsSettingsInput input = bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); + final UpdateGlobalViewsSettingsInput input = + bindArgument(environment.getArgument("input"), UpdateGlobalViewsSettingsInput.class); - return CompletableFuture.supplyAsync(() -> { - if (AuthorizationUtils.canManageGlobalViews(context)) { - try { - // First, fetch the existing global settings. This does a R-M-F. - final GlobalSettingsInfo maybeGlobalSettings = _settingsService.getGlobalSettings(context.getAuthentication()); + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageGlobalViews(context)) { + try { + // First, fetch the existing global settings. This does a R-M-F. + final GlobalSettingsInfo maybeGlobalSettings = + _settingsService.getGlobalSettings(context.getAuthentication()); - final GlobalSettingsInfo newGlobalSettings = maybeGlobalSettings != null - ? maybeGlobalSettings - : new GlobalSettingsInfo(); + final GlobalSettingsInfo newGlobalSettings = + maybeGlobalSettings != null ? maybeGlobalSettings : new GlobalSettingsInfo(); - final GlobalViewsSettings newGlobalViewsSettings = newGlobalSettings.hasViews() - ? newGlobalSettings.getViews() - : new GlobalViewsSettings(); + final GlobalViewsSettings newGlobalViewsSettings = + newGlobalSettings.hasViews() + ? newGlobalSettings.getViews() + : new GlobalViewsSettings(); - // Next, patch the global views settings. 
- updateViewsSettings(newGlobalViewsSettings, input); - newGlobalSettings.setViews(newGlobalViewsSettings); + // Next, patch the global views settings. + updateViewsSettings(newGlobalViewsSettings, input); + newGlobalSettings.setViews(newGlobalViewsSettings); - // Finally, write back to GMS. - _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update global view settings! %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Finally, write back to GMS. + _settingsService.updateGlobalSettings(newGlobalSettings, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update global view settings! %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private static void updateViewsSettings( @Nonnull final com.linkedin.settings.global.GlobalViewsSettings settings, @Nonnull final UpdateGlobalViewsSettingsInput input) { - settings.setDefaultView(input.getDefaultView() != null - ? UrnUtils.getUrn(input.getDefaultView()) - : null, + settings.setDefaultView( + input.getDefaultView() != null ? 
UrnUtils.getUrn(input.getDefaultView()) : null, SetMode.REMOVE_IF_NULL); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java index 0e93cdfb231fa4..9ea6cba0f211a1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.utils.EntityKeyUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -27,59 +31,64 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.utils.EntityKeyUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchGetStepStatesResolver implements DataFetcher> { +public class BatchGetStepStatesResolver + implements DataFetcher> { private final EntityClient _entityClient; @Override - public CompletableFuture get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); final BatchGetStepStatesInput input = bindArgument(environment.getArgument("input"), BatchGetStepStatesInput.class); - return CompletableFuture.supplyAsync(() 
-> { - Map urnsToIdsMap; - Set urns; - Map entityResponseMap; + return CompletableFuture.supplyAsync( + () -> { + Map urnsToIdsMap; + Set urns; + Map entityResponseMap; - try { - urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); - urns = urnsToIdsMap.keySet(); - entityResponseMap = _entityClient.batchGetV2(DATAHUB_STEP_STATE_ENTITY_NAME, urns, - ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), authentication); - } catch (Exception e) { - throw new RuntimeException(e); - } + try { + urnsToIdsMap = buildUrnToIdMap(input.getIds(), authentication); + urns = urnsToIdsMap.keySet(); + entityResponseMap = + _entityClient.batchGetV2( + DATAHUB_STEP_STATE_ENTITY_NAME, + urns, + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME), + authentication); + } catch (Exception e) { + throw new RuntimeException(e); + } - final Map stepStatePropertiesMap = new HashMap<>(); - for (Map.Entry entry : entityResponseMap.entrySet()) { - final Urn urn = entry.getKey(); - final DataHubStepStateProperties stepStateProperties = getStepStateProperties(urn, entry.getValue()); - if (stepStateProperties != null) { - stepStatePropertiesMap.put(urn, stepStateProperties); - } - } + final Map stepStatePropertiesMap = new HashMap<>(); + for (Map.Entry entry : entityResponseMap.entrySet()) { + final Urn urn = entry.getKey(); + final DataHubStepStateProperties stepStateProperties = + getStepStateProperties(urn, entry.getValue()); + if (stepStateProperties != null) { + stepStatePropertiesMap.put(urn, stepStateProperties); + } + } - final List results = stepStatePropertiesMap.entrySet() - .stream() - .map(entry -> buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) - .collect(Collectors.toList()); - final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); - result.setResults(results); - return result; - }); + final List results = + stepStatePropertiesMap.entrySet().stream() + .map( + entry -> + 
buildStepStateResult(urnsToIdsMap.get(entry.getKey()), entry.getValue())) + .collect(Collectors.toList()); + final BatchGetStepStatesResult result = new BatchGetStepStatesResult(); + result.setResults(results); + return result; + }); } @Nonnull - private Map buildUrnToIdMap(@Nonnull final List ids, @Nonnull final Authentication authentication) + private Map buildUrnToIdMap( + @Nonnull final List ids, @Nonnull final Authentication authentication) throws RemoteInvocationException { final Map urnToIdMap = new HashMap<>(); for (final String id : ids) { @@ -99,37 +108,37 @@ private Urn getStepStateUrn(@Nonnull final String id) { } @Nullable - private DataHubStepStateProperties getStepStateProperties(@Nonnull final Urn urn, - @Nonnull final EntityResponse entityResponse) { + private DataHubStepStateProperties getStepStateProperties( + @Nonnull final Urn urn, @Nonnull final EntityResponse entityResponse) { final EnvelopedAspectMap aspectMap = entityResponse.getAspects(); // If aspect is not present, log the error and return null. 
if (!aspectMap.containsKey(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME)) { log.error("Failed to find step state properties for urn: " + urn); return null; } - return new DataHubStepStateProperties(aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); + return new DataHubStepStateProperties( + aspectMap.get(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME).getValue().data()); } @Nonnull - private StepStateResult buildStepStateResult(@Nonnull final String id, - @Nonnull final DataHubStepStateProperties stepStateProperties) { + private StepStateResult buildStepStateResult( + @Nonnull final String id, @Nonnull final DataHubStepStateProperties stepStateProperties) { final StepStateResult result = new StepStateResult(); result.setId(id); - final List mappedProperties = stepStateProperties - .getProperties() - .entrySet() - .stream() - .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) - .collect(Collectors.toList()); + final List mappedProperties = + stepStateProperties.getProperties().entrySet().stream() + .map(entry -> buildStringMapEntry(entry.getKey(), entry.getValue())) + .collect(Collectors.toList()); result.setProperties(mappedProperties); return result; } @Nonnull - private StringMapEntry buildStringMapEntry(@Nonnull final String key, @Nonnull final String value) { + private StringMapEntry buildStringMapEntry( + @Nonnull final String key, @Nonnull final String value) { final StringMapEntry entry = new StringMapEntry(); entry.setKey(key); entry.setValue(value); return entry; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java index e4c21207ddd344..23d77ebba7457a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolver.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; +import static com.linkedin.metadata.entity.AspectUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -25,19 +29,15 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; -import static com.linkedin.metadata.entity.AspectUtils.*; - - @Slf4j @RequiredArgsConstructor -public class BatchUpdateStepStatesResolver implements DataFetcher> { +public class BatchUpdateStepStatesResolver + implements DataFetcher> { private final EntityClient _entityClient; @Override - public CompletableFuture get(@Nonnull final DataFetchingEnvironment environment) - throws Exception { + public CompletableFuture get( + @Nonnull final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); @@ -46,20 +46,23 @@ public CompletableFuture get(@Nonnull final DataFet final List states = input.getStates(); final String actorUrnStr = authentication.getActor().toUrnStr(); - return CompletableFuture.supplyAsync(() -> { - final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); - final AuditStamp auditStamp = new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); - final List results = states - .stream() - .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) - .collect(Collectors.toList()); - final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); - result.setResults(results); - return result; - }); + 
return CompletableFuture.supplyAsync( + () -> { + final Urn actorUrn = UrnUtils.getUrn(actorUrnStr); + final AuditStamp auditStamp = + new AuditStamp().setActor(actorUrn).setTime(System.currentTimeMillis()); + final List results = + states.stream() + .map(state -> buildUpdateStepStateResult(state, auditStamp, authentication)) + .collect(Collectors.toList()); + final BatchUpdateStepStatesResult result = new BatchUpdateStepStatesResult(); + result.setResults(results); + return result; + }); } - private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStateInput state, + private UpdateStepStateResult buildUpdateStepStateResult( + @Nonnull final StepStateInput state, @Nonnull final AuditStamp auditStamp, @Nonnull final Authentication authentication) { final String id = state.getId(); @@ -70,19 +73,27 @@ private UpdateStepStateResult buildUpdateStepStateResult(@Nonnull final StepStat return updateStepStateResult; } - private boolean updateStepState(@Nonnull final String id, - @Nonnull final List inputProperties, @Nonnull final AuditStamp auditStamp, + private boolean updateStepState( + @Nonnull final String id, + @Nonnull final List inputProperties, + @Nonnull final AuditStamp auditStamp, @Nonnull final Authentication authentication) { final Map properties = - inputProperties.stream().collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); + inputProperties.stream() + .collect(Collectors.toMap(StringMapEntryInput::getKey, StringMapEntryInput::getValue)); try { final DataHubStepStateKey stepStateKey = new DataHubStepStateKey().setId(id); final DataHubStepStateProperties stepStateProperties = - new DataHubStepStateProperties().setProperties(new StringMap(properties)).setLastModified(auditStamp); + new DataHubStepStateProperties() + .setProperties(new StringMap(properties)) + .setLastModified(auditStamp); final MetadataChangeProposal proposal = - buildMetadataChangeProposal(DATAHUB_STEP_STATE_ENTITY_NAME, stepStateKey, - 
DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, stepStateProperties); + buildMetadataChangeProposal( + DATAHUB_STEP_STATE_ENTITY_NAME, + stepStateKey, + DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, + stepStateProperties); _entityClient.ingestProposal(proposal, authentication, false); return true; } catch (Exception e) { @@ -90,4 +101,4 @@ private boolean updateStepState(@Nonnull final String id, return false; } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java index 9ee24e6941017d..153c95c697a774 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -22,14 +27,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS privilege. + * Resolver used for creating a new Tag on DataHub. Requires the CREATE_TAG or MANAGE_TAGS + * privilege. 
*/ @Slf4j @RequiredArgsConstructor @@ -42,43 +42,58 @@ public class CreateTagResolver implements DataFetcher> public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); - final CreateTagInput input = bindArgument(environment.getArgument("input"), CreateTagInput.class); - - return CompletableFuture.supplyAsync(() -> { + final CreateTagInput input = + bindArgument(environment.getArgument("input"), CreateTagInput.class); - if (!AuthorizationUtils.canCreateTags(context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + return CompletableFuture.supplyAsync( + () -> { + if (!AuthorizationUtils.canCreateTags(context)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - try { - // Create the Tag Key - final TagKey key = new TagKey(); + try { + // Create the Tag Key + final TagKey key = new TagKey(); - // Take user provided id OR generate a random UUID for the Tag. - final String id = input.getId() != null ? input.getId() : UUID.randomUUID().toString(); - key.setName(id); + // Take user provided id OR generate a random UUID for the Tag. + final String id = input.getId() != null ? 
input.getId() : UUID.randomUUID().toString(); + key.setName(id); - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), context.getAuthentication())) { - throw new IllegalArgumentException("This Tag already exists!"); - } + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, TAG_ENTITY_NAME), + context.getAuthentication())) { + throw new IllegalArgumentException("This Tag already exists!"); + } - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); - String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; - if (!_entityService.exists(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { - log.warn("Technical owner does not exist, defaulting to None ownership."); - ownershipType = OwnershipType.NONE; - } + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, mapTagProperties(input)); + String tagUrn = + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + OwnershipType ownershipType = OwnershipType.TECHNICAL_OWNER; + if (!_entityService.exists( + UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name())))) { + log.warn("Technical owner does not exist, defaulting to None ownership."); + ownershipType = OwnershipType.NONE; + } - OwnerUtils.addCreatorAsOwner(context, tagUrn, OwnerEntityType.CORP_USER, ownershipType, _entityService); - return tagUrn; - } catch (Exception e) { - log.error("Failed to create Tag with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); - throw new RuntimeException(String.format("Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), e); - } - }); + OwnerUtils.addCreatorAsOwner( + context, tagUrn, 
OwnerEntityType.CORP_USER, ownershipType, _entityService); + return tagUrn; + } catch (Exception e) { + log.error( + "Failed to create Tag with id: {}, name: {}: {}", + input.getId(), + input.getName(), + e.getMessage()); + throw new RuntimeException( + String.format( + "Failed to create Tag with id: %s, name: %s", input.getId(), input.getName()), + e); + } + }); } private TagProperties mapTagProperties(final CreateTagInput input) { @@ -87,4 +102,4 @@ private TagProperties mapTagProperties(final CreateTagInput input) { result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java index e6c3cf49df8db4..c5b86b013103c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolver.java @@ -11,10 +11,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp Group - */ +/** Resolver responsible for hard deleting a particular DataHub Corp Group */ @Slf4j public class DeleteTagResolver implements DataFetcher> { @@ -25,33 +22,41 @@ public DeleteTagResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String tagUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(tagUrn); - return CompletableFuture.supplyAsync(() -> { - - if (AuthorizationUtils.canManageTags(context) || 
AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { + if (AuthorizationUtils.canManageTags(context) + || AuthorizationUtils.canDeleteEntity(UrnUtils.getUrn(tagUrn), context)) { try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for Tag with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format( - "Caught exception while attempting to clear all entity references for Tag with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against domain with urn %s", tagUrn), e); } - }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against domain with urn %s", tagUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java index e2aa5905be8bd3..7b9290b4532b58 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolver.java @@ -1,11 +1,14 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.resolvers.AuthUtils; import com.linkedin.entity.client.EntityClient; @@ -21,19 +24,17 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS privilege for a particular asset. + * Resolver used for updating the Domain associated with a Metadata Asset. Requires the EDIT_DOMAINS + * privilege for a particular asset. 
*/ @Slf4j @RequiredArgsConstructor public class SetTagColorResolver implements DataFetcher> { private final EntityClient _entityClient; - private final EntityService _entityService; // TODO: Remove this when 'exists' added to EntityClient + private final EntityService + _entityService; // TODO: Remove this when 'exists' added to EntityClient @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -42,48 +43,55 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw final Urn tagUrn = Urn.createFromString(environment.getArgument("urn")); final String colorHex = environment.getArgument("colorHex"); - return CompletableFuture.supplyAsync(() -> { - - // If user is not authorized, then throw exception. - if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - // If tag does not exist, then throw exception. - if (!_entityService.exists(tagUrn)) { - throw new IllegalArgumentException( - String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); - } - - try { - TagProperties tagProperties = (TagProperties) EntityUtils.getAspectFromEntity( - tagUrn.toString(), - TAG_PROPERTIES_ASPECT_NAME, - _entityService, - null); - - if (tagProperties == null) { - throw new IllegalArgumentException("Failed to set tag color. Tag properties does not yet exist!"); - } - - tagProperties.setColorHex(colorHex); - - // Update the TagProperties aspect. 
- final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - return true; - } catch (Exception e) { - log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); - throw new RuntimeException(String.format("Failed to set color for Tag with urn %s", tagUrn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + + // If user is not authorized, then throw exception. + if (!isAuthorizedToSetTagColor(environment.getContext(), tagUrn)) { + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + // If tag does not exist, then throw exception. + if (!_entityService.exists(tagUrn)) { + throw new IllegalArgumentException( + String.format("Failed to set Tag %s color. Tag does not exist.", tagUrn)); + } + + try { + TagProperties tagProperties = + (TagProperties) + EntityUtils.getAspectFromEntity( + tagUrn.toString(), TAG_PROPERTIES_ASPECT_NAME, _entityService, null); + + if (tagProperties == null) { + throw new IllegalArgumentException( + "Failed to set tag color. Tag properties does not yet exist!"); + } + + tagProperties.setColorHex(colorHex); + + // Update the TagProperties aspect. 
+ final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + tagUrn, TAG_PROPERTIES_ASPECT_NAME, tagProperties); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + return true; + } catch (Exception e) { + log.error("Failed to set color for Tag with urn {}: {}", tagUrn, e.getMessage()); + throw new RuntimeException( + String.format("Failed to set color for Tag with urn %s", tagUrn), e); + } + }); } public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, Urn entityUrn) { - final DisjunctivePrivilegeGroup orPrivilegeGroups = new DisjunctivePrivilegeGroup(ImmutableList.of( - AuthUtils.ALL_PRIVILEGES_GROUP, - new ConjunctivePrivilegeGroup(ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())) - )); + final DisjunctivePrivilegeGroup orPrivilegeGroups = + new DisjunctivePrivilegeGroup( + ImmutableList.of( + AuthUtils.ALL_PRIVILEGES_GROUP, + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_TAG_COLOR_PRIVILEGE.getType())))); return AuthorizationUtils.isAuthorized( context.getAuthorizer(), @@ -92,4 +100,4 @@ public static boolean isAuthorizedToSetTagColor(@Nonnull QueryContext context, U entityUrn.toString(), orPrivilegeGroups); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java index 14ae9f96eb6831..b12b345a7b2116 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static 
com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,15 +20,7 @@ import java.util.UUID; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Creates or updates a Test. Requires the MANAGE_TESTS privilege. - */ +/** Creates or updates a Test. Requires the MANAGE_TESTS privilege. */ public class CreateTestResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -36,39 +33,44 @@ public CreateTestResolver(final EntityClient entityClient) { public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - final CreateTestInput input = bindArgument(environment.getArgument("input"), CreateTestInput.class); - - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - - try { - - // Create new test - // Since we are creating a new Test, we need to generate a unique UUID. - final UUID uuid = UUID.randomUUID(); - final String uuidStr = input.getId() == null ? 
uuid.toString() : input.getId(); - - // Create the Ingestion source key - final TestKey key = new TestKey(); - key.setId(uuidStr); - - if (_entityClient.exists(EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), - authentication)) { - throw new IllegalArgumentException("This Test already exists!"); + final CreateTestInput input = + bindArgument(environment.getArgument("input"), CreateTestInput.class); + + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + + try { + + // Create new test + // Since we are creating a new Test, we need to generate a unique UUID. + final UUID uuid = UUID.randomUUID(); + final String uuidStr = input.getId() == null ? uuid.toString() : input.getId(); + + // Create the Ingestion source key + final TestKey key = new TestKey(); + key.setId(uuidStr); + + if (_entityClient.exists( + EntityKeyUtils.convertEntityKeyToUrn(key, TEST_ENTITY_NAME), authentication)) { + throw new IllegalArgumentException("This Test already exists!"); + } + + // Create the Test info. + final TestInfo info = mapCreateTestInput(input); + + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithKey( + key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } } - - // Create the Test info. - final TestInfo info = mapCreateTestInput(input); - - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithKey(key, TEST_ENTITY_NAME, TEST_INFO_ASPECT_NAME, info); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } private static TestInfo mapCreateTestInput(final CreateTestInput input) { @@ -79,5 +81,4 @@ private static TestInfo mapCreateTestInput(final CreateTestInput input) { result.setDefinition(mapDefinition(input.getDefinition())); return result; } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java index e0c878dc652bd7..6bc7e479b305c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -7,8 +9,6 @@ import graphql.schema.DataFetcher; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - /** * Resolver responsible for hard deleting a particular DataHub Test. 
Requires MANAGE_TESTS @@ -23,20 +23,24 @@ public DeleteTestResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String testUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(testUrn); - return CompletableFuture.supplyAsync(() -> { - if (canManageTests(context)) { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against Test with urn %s", testUrn), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + try { + _entityClient.deleteEntity(urn, context.getAuthentication()); + return true; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against Test with urn %s", testUrn), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java index a1e1e48aae8473..f345d9ceb21e52 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolver.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.ListTestsInput; import com.linkedin.datahub.graphql.generated.ListTestsResult; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.SearchFlags; @@ -20,12 +23,9 @@ import java.util.List; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; - - /** - * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform privilege. + * Resolver used for listing all Tests defined within DataHub. Requires the MANAGE_DOMAINS platform + * privilege. 
*/ public class ListTestsResolver implements DataFetcher> { @@ -39,45 +39,50 @@ public ListTestsResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { - final ListTestsInput input = bindArgument(environment.getArgument("input"), ListTestsInput.class); - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? "" : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { + final ListTestsInput input = + bindArgument(environment.getArgument("input"), ListTestsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? "" : input.getQuery(); - try { - // First, get all group Urns. - final SearchResult gmsResult = _entityClient.search( - Constants.TEST_ENTITY_NAME, - query, - Collections.emptyMap(), - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + try { + // First, get all group Urns. + final SearchResult gmsResult = + _entityClient.search( + Constants.TEST_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Now that we have entities we can bind this to a result. 
- final ListTestsResult result = new ListTestsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setTests(mapUnresolvedTests(gmsResult.getEntities())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list tests", e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + // Now that we have entities we can bind this to a result. + final ListTestsResult result = new ListTestsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setTests(mapUnresolvedTests(gmsResult.getEntities())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list tests", e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + }); } - // This method maps urns returned from the list endpoint into Partial Test objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial Test objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedTests(final SearchEntityArray entityArray) { final List results = new ArrayList<>(); for (final SearchEntity entity : entityArray) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java index 9c4b5a4d4e0fa3..6cb55100ec08eb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestResultsResolver.java @@ -20,10 +20,7 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; - -/** - * GraphQL Resolver used for fetching the list of tests for an entity - */ +/** GraphQL Resolver used for fetching the list of tests for an entity */ @Slf4j public class TestResultsResolver implements DataFetcher> { @@ -38,42 +35,44 @@ public CompletableFuture get(DataFetchingEnvironment environment) t final QueryContext context = environment.getContext(); final Urn entityUrn = Urn.createFromString(((Entity) environment.getSource()).getUrn()); - return CompletableFuture.supplyAsync(() -> { - - final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); + return CompletableFuture.supplyAsync( + () -> { + final com.linkedin.test.TestResults gmsTestResults = getTestResults(entityUrn, context); - if (gmsTestResults == null) { - return null; - } + if (gmsTestResults == null) { + return null; + } - TestResults testResults = new TestResults(); - testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); - testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); - return testResults; - }); + TestResults testResults = new TestResults(); + testResults.setPassing(mapTestResults(gmsTestResults.getPassing())); + testResults.setFailing(mapTestResults(gmsTestResults.getFailing())); + return testResults; 
+ }); } @Nullable - private com.linkedin.test.TestResults getTestResults(final Urn entityUrn, final QueryContext context) { + private com.linkedin.test.TestResults getTestResults( + final Urn entityUrn, final QueryContext context) { try { - final EntityResponse entityResponse = _entityClient.getV2( - entityUrn.getEntityType(), - entityUrn, - ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), - context.getAuthentication()); - if (entityResponse.hasAspects() && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { + final EntityResponse entityResponse = + _entityClient.getV2( + entityUrn.getEntityType(), + entityUrn, + ImmutableSet.of(Constants.TEST_RESULTS_ASPECT_NAME), + context.getAuthentication()); + if (entityResponse.hasAspects() + && entityResponse.getAspects().containsKey(Constants.TEST_RESULTS_ASPECT_NAME)) { return new com.linkedin.test.TestResults( - entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME) - .getValue() - .data()); + entityResponse.getAspects().get(Constants.TEST_RESULTS_ASPECT_NAME).getValue().data()); } return null; } catch (Exception e) { - throw new RuntimeException("Failed to get test results", e); + throw new RuntimeException("Failed to get test results", e); } } - private List mapTestResults(final @Nonnull List gmsResults) { + private List mapTestResults( + final @Nonnull List gmsResults) { final List results = new ArrayList<>(); for (com.linkedin.test.TestResult gmsResult : gmsResults) { results.add(mapTestResult(gmsResult)); @@ -89,4 +88,4 @@ private TestResult mapTestResult(final @Nonnull com.linkedin.test.TestResult gms testResult.setType(TestResultType.valueOf(gmsResult.getType().toString())); return testResult; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java index 248da3e58d8aef..922c28097f83c4 100644 
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/TestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; + import com.linkedin.data.template.RecordTemplate; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; @@ -15,14 +17,9 @@ import java.util.Optional; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; - - public class TestUtils { - /** - * Returns true if the authenticated user is able to manage tests. - */ + /** Returns true if the authenticated user is able to manage tests. */ public static boolean canManageTests(@Nonnull QueryContext context) { return isAuthorized(context, Optional.empty(), PoliciesConfig.MANAGE_TESTS_PRIVILEGE); } @@ -38,11 +35,12 @@ public static EntityResponse buildEntityResponse(Map asp final EntityResponse entityResponse = new EntityResponse(); final EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); for (Map.Entry entry : aspects.entrySet()) { - aspectMap.put(entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data()))); + aspectMap.put( + entry.getKey(), new EnvelopedAspect().setValue(new Aspect(entry.getValue().data()))); } entityResponse.setAspects(aspectMap); return entityResponse; } - private TestUtils() { } + private TestUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java index 1dd8518076796a..b5d6e50fe07748 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolver.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.SetMode; @@ -13,15 +18,7 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.datahub.graphql.resolvers.test.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - -/** - * Updates or updates a Test. Requires the MANAGE_TESTS privilege. - */ +/** Updates or updates a Test. Requires the MANAGE_TESTS privilege. */ public class UpdateTestResolver implements DataFetcher> { private final EntityClient _entityClient; @@ -35,26 +32,30 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final QueryContext context = environment.getContext(); final Authentication authentication = context.getAuthentication(); - return CompletableFuture.supplyAsync(() -> { - - if (canManageTests(context)) { + return CompletableFuture.supplyAsync( + () -> { + if (canManageTests(context)) { - final String urn = environment.getArgument("urn"); - final UpdateTestInput input = bindArgument(environment.getArgument("input"), UpdateTestInput.class); + final String urn = environment.getArgument("urn"); + final UpdateTestInput input = + bindArgument(environment.getArgument("input"), UpdateTestInput.class); - // Update the Test info - currently this simply creates a new test with same urn. 
- final TestInfo info = mapUpdateTestInput(input); + // Update the Test info - currently this simply creates a new test with same urn. + final TestInfo info = mapUpdateTestInput(input); - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info); - try { - return _entityClient.ingestProposal(proposal, authentication, false); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to perform update against Test with urn %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), TEST_INFO_ASPECT_NAME, info); + try { + return _entityClient.ingestProposal(proposal, authentication, false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against Test with urn %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private static TestInfo mapUpdateTestInput(final UpdateTestInput input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java index 499e7c9ac177d9..ea234280ed6c26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaBlameResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GetSchemaBlameInput; import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; @@ -16,15 +18,13 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. TODO: Add tests for this resolver. 
*/ @Slf4j -public class GetSchemaBlameResolver implements DataFetcher> { +public class GetSchemaBlameResolver + implements DataFetcher> { private final TimelineService _timelineService; public GetSchemaBlameResolver(TimelineService timelineService) { @@ -32,37 +32,37 @@ public GetSchemaBlameResolver(TimelineService timelineService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { - final GetSchemaBlameInput input = bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class); + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { + final GetSchemaBlameInput input = + bindArgument(environment.getArgument("input"), GetSchemaBlameInput.class); final String datasetUrnString = input.getDatasetUrn(); final long startTime = 0; final long endTime = 0; final String version = input.getVersion() == null ? null : input.getVersion(); - return CompletableFuture.supplyAsync(() -> { - try { - final Set changeCategorySet = Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); - final Urn datasetUrn = Urn.createFromString(datasetUrnString); - final List changeTransactionList = - _timelineService.getTimeline( - datasetUrn, - changeCategorySet, - startTime, - endTime, - null, - null, - false); - return SchemaBlameMapper.map(changeTransactionList, version); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set changeCategorySet = + Collections.singleton(ChangeCategory.TECHNICAL_SCHEMA); + final Urn datasetUrn = Urn.createFromString(datasetUrnString); + final List changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, 
false); + return SchemaBlameMapper.map(changeTransactionList, version); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java index cfad1395a61a88..5063dbbf7ccf32 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/timeline/GetSchemaVersionListResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.timeline; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListInput; import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; @@ -16,14 +18,12 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - /* Returns the most recent changes made to each column in a dataset at each dataset version. 
*/ @Slf4j -public class GetSchemaVersionListResolver implements DataFetcher> { +public class GetSchemaVersionListResolver + implements DataFetcher> { private final TimelineService _timelineService; public GetSchemaVersionListResolver(TimelineService timelineService) { @@ -31,7 +31,8 @@ public GetSchemaVersionListResolver(TimelineService timelineService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get( + final DataFetchingEnvironment environment) throws Exception { final GetSchemaVersionListInput input = bindArgument(environment.getArgument("input"), GetSchemaVersionListInput.class); @@ -39,23 +40,27 @@ public CompletableFuture get(final DataFetchingEnvir final long startTime = 0; final long endTime = 0; - return CompletableFuture.supplyAsync(() -> { - try { - final Set changeCategorySet = new HashSet<>(); - changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); - Urn datasetUrn = Urn.createFromString(datasetUrnString); - List changeTransactionList = - _timelineService.getTimeline(datasetUrn, changeCategorySet, startTime, endTime, null, null, false); - return SchemaVersionListMapper.map(changeTransactionList); - } catch (URISyntaxException u) { - log.error( - String.format("Failed to list schema blame data, likely due to the Urn %s being invalid", datasetUrnString), - u); - return null; - } catch (Exception e) { - log.error("Failed to list schema blame data", e); - return null; - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + final Set changeCategorySet = new HashSet<>(); + changeCategorySet.add(ChangeCategory.TECHNICAL_SCHEMA); + Urn datasetUrn = Urn.createFromString(datasetUrnString); + List changeTransactionList = + _timelineService.getTimeline( + datasetUrn, changeCategorySet, startTime, endTime, null, null, false); + return SchemaVersionListMapper.map(changeTransactionList); + } catch (URISyntaxException u) { + log.error( + String.format( + "Failed to 
list schema blame data, likely due to the Urn %s being invalid", + datasetUrnString), + u); + return null; + } catch (Exception e) { + log.error("Failed to list schema blame data", e); + return null; + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java index 45998bdae45b04..14429696fefd42 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/AspectInterfaceTypeResolver.java @@ -5,16 +5,18 @@ import graphql.schema.TypeResolver; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Aspect} interface + * type. */ public class AspectInterfaceTypeResolver implements TypeResolver { - public AspectInterfaceTypeResolver() { } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this - // out in the case we ever want to return fields of type Aspect in graphql. Right now - // we just use Aspect to define the shared `version` field. - return null; - } + public AspectInterfaceTypeResolver() {} + + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + // TODO(Gabe): Fill this out. This method is not called today. We will need to fill this + // out in the case we ever want to return fields of type Aspect in graphql. Right now + // we just use Aspect to define the shared `version` field. 
+ return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java index 1a5f06da040141..52c20254332b39 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/EntityInterfaceTypeResolver.java @@ -6,27 +6,29 @@ import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; - import java.util.List; import java.util.stream.Collectors; /** - * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface type. + * Responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Entity} interface + * type. */ public class EntityInterfaceTypeResolver implements TypeResolver { - private final List> _entities; + private final List> _entities; - public EntityInterfaceTypeResolver(final List> entities) { - _entities = entities; - } + public EntityInterfaceTypeResolver(final List> entities) { + _entities = entities; + } - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - Object javaObject = env.getObject(); - final LoadableType filteredEntity = Iterables.getOnlyElement(_entities.stream() + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + Object javaObject = env.getObject(); + final LoadableType filteredEntity = + Iterables.getOnlyElement( + _entities.stream() .filter(entity -> javaObject.getClass().isAssignableFrom(entity.objectClass())) .collect(Collectors.toList())); - return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); - } + return env.getSchema().getObjectType(filteredEntity.objectClass().getSimpleName()); + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java index a69500f24ee24a..aeeb9bafa1f4c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/HyperParameterValueTypeResolver.java @@ -4,30 +4,30 @@ import com.linkedin.datahub.graphql.generated.FloatBox; import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class HyperParameterValueTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; - public static final String INT_BOX = "IntBox"; - public static final String FLOAT_BOX = "FloatBox"; - public static final String BOOLEAN_BOX = "BooleanBox"; + public static final String STRING_BOX = "StringBox"; + public static final String INT_BOX = "IntBox"; + public static final String FLOAT_BOX = "FloatBox"; + public static final String BOOLEAN_BOX = "BooleanBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else if (env.getObject() instanceof IntBox) { - return env.getSchema().getObjectType(INT_BOX); - } else if (env.getObject() instanceof BooleanBox) { - return env.getSchema().getObjectType(BOOLEAN_BOX); - } else if (env.getObject() instanceof FloatBox) { - return env.getSchema().getObjectType(FLOAT_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType 
getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else if (env.getObject() instanceof IntBox) { + return env.getSchema().getObjectType(INT_BOX); + } else if (env.getObject() instanceof BooleanBox) { + return env.getSchema().getObjectType(BOOLEAN_BOX); + } else if (env.getObject() instanceof FloatBox) { + return env.getSchema().getObjectType(FLOAT_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java index 25a9a540f51b18..ff190cff1339e6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/PlatformSchemaUnionTypeResolver.java @@ -8,17 +8,17 @@ public class PlatformSchemaUnionTypeResolver implements TypeResolver { - private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; - private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; + private static final String TABLE_SCHEMA_TYPE_NAME = "TableSchema"; + private static final String KEY_VALUE_SCHEMA_TYPE_NAME = "KeyValueSchema"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof TableSchema) { - return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); - } else if (env.getObject() instanceof KeyValueSchema) { - return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver"); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment 
env) { + if (env.getObject() instanceof TableSchema) { + return env.getSchema().getObjectType(TABLE_SCHEMA_TYPE_NAME); + } else if (env.getObject() instanceof KeyValueSchema) { + return env.getSchema().getObjectType(KEY_VALUE_SCHEMA_TYPE_NAME); + } else { + throw new RuntimeException("Unrecognized object type provided to type resolver"); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java index 0dc7b0485c51cf..c5be5725f1d45c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/ResultsTypeResolver.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.resolvers.type; import com.linkedin.datahub.graphql.generated.StringBox; - import graphql.TypeResolutionEnvironment; import graphql.schema.GraphQLObjectType; import graphql.schema.TypeResolver; public class ResultsTypeResolver implements TypeResolver { - public static final String STRING_BOX = "StringBox"; + public static final String STRING_BOX = "StringBox"; - @Override - public GraphQLObjectType getType(TypeResolutionEnvironment env) { - if (env.getObject() instanceof StringBox) { - return env.getSchema().getObjectType(STRING_BOX); - } else { - throw new RuntimeException("Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); - } + @Override + public GraphQLObjectType getType(TypeResolutionEnvironment env) { + if (env.getObject() instanceof StringBox) { + return env.getSchema().getObjectType(STRING_BOX); + } else { + throw new RuntimeException( + "Unrecognized object type provided to type resolver, Type:" + env.getObject().toString()); } + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java index 5263e6b9b7df6f..c66588008b1030 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/type/TimeSeriesAspectInterfaceTypeResolver.java @@ -6,7 +6,8 @@ public class TimeSeriesAspectInterfaceTypeResolver implements TypeResolver { - public TimeSeriesAspectInterfaceTypeResolver() { } + public TimeSeriesAspectInterfaceTypeResolver() {} + @Override public GraphQLObjectType getType(TypeResolutionEnvironment env) { // TODO(John): Fill this out. diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java index d02f1a5f786a74..db26da05a2ba49 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -10,14 +13,12 @@ import java.util.Objects; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.authorization.AuthorizationUtils.*; -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - /** - * Resolver responsible for creating a password reset token that Admins can share with native users to reset their - * credentials. + * Resolver responsible for creating a password reset token that Admins can share with native users + * to reset their credentials. */ -public class CreateNativeUserResetTokenResolver implements DataFetcher> { +public class CreateNativeUserResetTokenResolver + implements DataFetcher> { private final NativeUserService _nativeUserService; public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserService) { @@ -25,7 +26,8 @@ public CreateNativeUserResetTokenResolver(final NativeUserService nativeUserServ } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final CreateNativeUserResetTokenInput input = bindArgument(environment.getArgument("input"), CreateNativeUserResetTokenInput.class); @@ -38,15 +40,18 @@ public CompletableFuture get(final DataFetchingEnvironment environme "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } - return CompletableFuture.supplyAsync(() -> { - try { - String resetToken = - _nativeUserService.generateNativeUserPasswordResetToken(userUrnString, context.getAuthentication()); - return new ResetToken(resetToken); - } catch (Exception e) { - throw new RuntimeException( - String.format("Failed to generate password reset token for user: %s", userUrnString)); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + String resetToken = + _nativeUserService.generateNativeUserPasswordResetToken( + userUrnString, context.getAuthentication()); + return new ResetToken(resetToken); + } catch (Exception e) { + throw new RuntimeException( + String.format( + "Failed to generate password reset token for user: %s", userUrnString)); + } + }); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java index 69da642ad6bb18..215d53299c8ac1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/ListUsersResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; @@ -23,10 +26,6 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class ListUsersResolver implements DataFetcher> { private static final Integer DEFAULT_START = 0; @@ 
-40,48 +39,60 @@ public ListUsersResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { - final ListUsersInput input = bindArgument(environment.getArgument("input"), ListUsersInput.class); + final ListUsersInput input = + bindArgument(environment.getArgument("input"), ListUsersInput.class); final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - return CompletableFuture.supplyAsync(() -> { - try { - // First, get all policy Urns. - final SearchResult gmsResult = - _entityClient.search(CORP_USER_ENTITY_NAME, query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + return CompletableFuture.supplyAsync( + () -> { + try { + // First, get all policy Urns. + final SearchResult gmsResult = + _entityClient.search( + CORP_USER_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - // Then, get hydrate all users. - final Map entities = _entityClient.batchGetV2(CORP_USER_ENTITY_NAME, - new HashSet<>(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()) - ), null, context.getAuthentication()); + // Then, get hydrate all users. 
+ final Map entities = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + new HashSet<>( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList())), + null, + context.getAuthentication()); - // Now that we have entities we can bind this to a result. - final ListUsersResult result = new ListUsersResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setUsers(mapEntities(entities.values())); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list users", e); - } - }); + // Now that we have entities we can bind this to a result. + final ListUsersResult result = new ListUsersResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setUsers(mapEntities(entities.values())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list users", e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } private List mapEntities(final Collection entities) { - return entities.stream() - .map(CorpUserMapper::map) - .collect(Collectors.toList()); + return entities.stream().map(CorpUserMapper::map).collect(Collectors.toList()); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java index 718810e4710e7e..7131a9d2a9a266 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/RemoveUserResolver.java @@ -10,10 +10,7 @@ import java.util.concurrent.CompletableFuture; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub Corp User - */ +/** Resolver responsible for hard deleting a particular DataHub Corp User */ @Slf4j public class RemoveUserResolver implements DataFetcher> { @@ -24,30 +21,39 @@ public RemoveUserResolver(final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); if (AuthorizationUtils.canManageUsersAndGroups(context)) { final String userUrn = environment.getArgument("urn"); final Urn urn = Urn.createFromString(userUrn); - return CompletableFuture.supplyAsync(() -> { - try { - _entityClient.deleteEntity(urn, context.getAuthentication()); - - // Asynchronously Delete all references to the entity (to return quickly) - CompletableFuture.runAsync(() -> { + return CompletableFuture.supplyAsync( + () -> { try { - _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + _entityClient.deleteEntity(urn, 
context.getAuthentication()); + + // Asynchronously Delete all references to the entity (to return quickly) + CompletableFuture.runAsync( + () -> { + try { + _entityClient.deleteEntityReferences(urn, context.getAuthentication()); + } catch (Exception e) { + log.error( + String.format( + "Caught exception while attempting to clear all entity references for user with urn %s", + urn), + e); + } + }); + + return true; } catch (Exception e) { - log.error(String.format("Caught exception while attempting to clear all entity references for user with urn %s", urn), e); + throw new RuntimeException( + String.format("Failed to perform delete against user with urn %s", userUrn), e); } }); - - return true; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against user with urn %s", userUrn), e); - } - }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java index ab04d26fb5801f..6a0e81a10f40b4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/user/UpdateUserStatusResolver.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -13,12 +16,9 @@ import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletableFuture; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - /** - * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform privilege. + * Resolver responsible for editing a CorpUser's status. Requires the Manage Users & Groups platform + * privilege. 
*/ public class UpdateUserStatusResolver implements DataFetcher> { @@ -37,20 +37,28 @@ public CompletableFuture get(final DataFetchingEnvironment environment) final CorpUserStatus newStatus = CorpUserStatus.valueOf(environment.getArgument("status")); // Create ths status aspect - final com.linkedin.identity.CorpUserStatus statusAspect = new com.linkedin.identity.CorpUserStatus(); + final com.linkedin.identity.CorpUserStatus statusAspect = + new com.linkedin.identity.CorpUserStatus(); statusAspect.setStatus(newStatus.toString()); - statusAspect.setLastModified(new AuditStamp().setTime(System.currentTimeMillis()).setActor(Urn.createFromString(context.getActorUrn()))); - - return CompletableFuture.supplyAsync(() -> { - try { - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(userUrn), - CORP_USER_STATUS_ASPECT_NAME, statusAspect); - return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to update user status for urn", userUrn), e); - } - }); + statusAspect.setLastModified( + new AuditStamp() + .setTime(System.currentTimeMillis()) + .setActor(Urn.createFromString(context.getActorUrn()))); + + return CompletableFuture.supplyAsync( + () -> { + try { + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(userUrn), CORP_USER_STATUS_ASPECT_NAME, statusAspect); + return _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to update user status for urn", userUrn), e); + } + }); } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java index 6e39879dd56bcc..830c9013835d08 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -18,12 +20,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class CreateViewResolver implements DataFetcher> { @@ -34,29 +31,34 @@ public CreateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final CreateViewInput input = bindArgument(environment.getArgument("input"), CreateViewInput.class); + final CreateViewInput input = + bindArgument(environment.getArgument("input"), CreateViewInput.class); - return CompletableFuture.supplyAsync(() -> { - if (ViewUtils.canCreateView( - DataHubViewType.valueOf(input.getViewType().toString()), - context)) { - try { - final Urn urn = _viewService.createView( - 
DataHubViewType.valueOf(input.getViewType().toString()), - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - return createView(urn, input); - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to create View with input: %s", input), e); - } - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - }); + return CompletableFuture.supplyAsync( + () -> { + if (ViewUtils.canCreateView( + DataHubViewType.valueOf(input.getViewType().toString()), context)) { + try { + final Urn urn = + _viewService.createView( + DataHubViewType.valueOf(input.getViewType().toString()), + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition(input.getDefinition()), + context.getAuthentication(), + System.currentTimeMillis()); + return createView(urn, input); + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to create View with input: %s", input), e); + } + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }); } private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateViewInput input) { @@ -66,15 +68,20 @@ private DataHubView createView(@Nonnull final Urn urn, @Nonnull final CreateView .setViewType(input.getViewType()) .setName(input.getName()) .setDescription(input.getDescription()) - .setDefinition(new DataHubViewDefinition( - input.getDefinition().getEntityTypes(), - new DataHubViewFilter( - input.getDefinition().getFilter().getOperator(), - input.getDefinition().getFilter().getFilters().stream().map(filterInput -> - new FacetFilter(filterInput.getField(), filterInput.getCondition(), - filterInput.getValues(), - filterInput.getNegated())) - .collect(Collectors.toList())))) + .setDefinition( + new DataHubViewDefinition( + input.getDefinition().getEntityTypes(), + new DataHubViewFilter( + input.getDefinition().getFilter().getOperator(), + input.getDefinition().getFilter().getFilters().stream() + .map( + filterInput -> + new FacetFilter( + filterInput.getField(), + filterInput.getCondition(), + filterInput.getValues(), + filterInput.getNegated())) + .collect(Collectors.toList())))) .build(); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java index 2b8c3b8640aa88..a3b21ad0c96815 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolver.java @@ -11,10 +11,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; - -/** - * Resolver responsible for hard deleting a particular DataHub View - */ +/** Resolver responsible for hard deleting a particular DataHub View */ @Slf4j public class DeleteViewResolver implements DataFetcher> { @@ -25,24 +22,27 @@ public 
DeleteViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.deleteView(urn, context.getAuthentication()); - log.info(String.format("Successfully deleted View %s with urn", urn)); - return true; - } - throw new AuthorizationException( - "Unauthorized to perform this action. Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform delete against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.deleteView(urn, context.getAuthentication()); + log.info(String.format("Successfully deleted View %s with urn", urn)); + return true; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform delete against View with urn %s", urn), e); + } + }); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java index 51bbcfcfa25ae4..caa37f82648544 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -30,20 +32,14 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing global DataHub Views. - */ +/** Resolver used for listing global DataHub Views. 
*/ @Slf4j public class ListGlobalViewsResolver implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -55,43 +51,50 @@ public ListGlobalViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListGlobalViewsInput input = bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); + final ListGlobalViewsInput input = + bindArgument(environment.getArgument("input"), ListGlobalViewsInput.class); - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? 
DEFAULT_QUERY : input.getQuery(); - try { + try { - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); + final SearchResult gmsResult = + _entityClient.search( + Constants.DATAHUB_VIEW_ENTITY_NAME, + query, + buildFilters(), + DEFAULT_SORT_CRITERION, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list global Views", e); - } - }); + final ListViewsResult result = new ListViewsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setViews( + mapUnresolvedViews( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list global Views", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial View objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedViews(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { @@ -107,7 +110,12 @@ private Filter buildFilters() { final AndFilterInput globalCriteria = new AndFilterInput(); List andConditions = new ArrayList<>(); andConditions.add( - new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(DataHubViewType.GLOBAL.name()), false, FilterOperator.EQUAL)); + new FacetFilterInput( + VIEW_TYPE_FIELD, + null, + ImmutableList.of(DataHubViewType.GLOBAL.name()), + false, + FilterOperator.EQUAL)); globalCriteria.setAnd(andConditions); return buildFilter(Collections.emptyList(), ImmutableList.of(globalCriteria)); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java index d8705e216503cc..945d2d50bcc3e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -30,21 +32,15 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - -/** - * Resolver used for listing the current user's DataHub Views. - */ +/** Resolver used for listing the current user's DataHub Views. 
*/ @Slf4j public class ListMyViewsResolver implements DataFetcher> { private static final String CREATED_AT_FIELD = "createdAt"; private static final String VIEW_TYPE_FIELD = "type"; private static final String CREATOR_URN_FIELD = "createdBy"; - private static final SortCriterion DEFAULT_SORT_CRITERION = new SortCriterion() - .setField(CREATED_AT_FIELD) - .setOrder(SortOrder.DESCENDING); + private static final SortCriterion DEFAULT_SORT_CRITERION = + new SortCriterion().setField(CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING); private static final Integer DEFAULT_START = 0; private static final Integer DEFAULT_COUNT = 20; private static final String DEFAULT_QUERY = ""; @@ -56,44 +52,52 @@ public ListMyViewsResolver(@Nonnull final EntityClient entityClient) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); - final ListMyViewsInput input = bindArgument(environment.getArgument("input"), ListMyViewsInput.class); - - return CompletableFuture.supplyAsync(() -> { - final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); - final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); - final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); - final String viewType = input.getViewType() == null ? 
null : input.getViewType().toString(); - - try { - - final SearchResult gmsResult = _entityClient.search( - Constants.DATAHUB_VIEW_ENTITY_NAME, - query, - buildFilters(viewType, context.getActorUrn()), - DEFAULT_SORT_CRITERION, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - - final ListViewsResult result = new ListViewsResult(); - result.setStart(gmsResult.getFrom()); - result.setCount(gmsResult.getPageSize()); - result.setTotal(gmsResult.getNumEntities()); - result.setViews(mapUnresolvedViews(gmsResult.getEntities().stream() - .map(SearchEntity::getEntity) - .collect(Collectors.toList()))); - return result; - } catch (Exception e) { - throw new RuntimeException("Failed to list Views", e); - } - }); + final ListMyViewsInput input = + bindArgument(environment.getArgument("input"), ListMyViewsInput.class); + + return CompletableFuture.supplyAsync( + () -> { + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + final String viewType = + input.getViewType() == null ? 
null : input.getViewType().toString(); + + try { + + final SearchResult gmsResult = + _entityClient.search( + Constants.DATAHUB_VIEW_ENTITY_NAME, + query, + buildFilters(viewType, context.getActorUrn()), + DEFAULT_SORT_CRITERION, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + + final ListViewsResult result = new ListViewsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setViews( + mapUnresolvedViews( + gmsResult.getEntities().stream() + .map(SearchEntity::getEntity) + .collect(Collectors.toList()))); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list Views", e); + } + }); } - // This method maps urns returned from the list endpoint into Partial View objects which will be resolved be a separate Batch resolver. + // This method maps urns returned from the list endpoint into Partial View objects which will be + // resolved be a separate Batch resolver. 
private List mapUnresolvedViews(final List entityUrns) { final List results = new ArrayList<>(); for (final Urn urn : entityUrns) { @@ -110,14 +114,12 @@ private Filter buildFilters(@Nullable final String viewType, final String creato final AndFilterInput filterCriteria = new AndFilterInput(); final List andConditions = new ArrayList<>(); andConditions.add( - new FacetFilterInput(CREATOR_URN_FIELD, - null, - ImmutableList.of(creatorUrn), - false, - FilterOperator.EQUAL)); + new FacetFilterInput( + CREATOR_URN_FIELD, null, ImmutableList.of(creatorUrn), false, FilterOperator.EQUAL)); if (viewType != null) { andConditions.add( - new FacetFilterInput(VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL)); + new FacetFilterInput( + VIEW_TYPE_FIELD, null, ImmutableList.of(viewType), false, FilterOperator.EQUAL)); } filterCriteria.setAnd(andConditions); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java index 61e22da3c94447..5a52a57d9c374d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolver.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -16,11 +18,7 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - -/** - * Resolver responsible for updating a particular DataHub View - */ +/** Resolver responsible for updating a particular DataHub View */ @Slf4j public class UpdateViewResolver implements 
DataFetcher> { @@ -31,40 +29,47 @@ public UpdateViewResolver(@Nonnull final ViewService viewService) { } @Override - public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + public CompletableFuture get(final DataFetchingEnvironment environment) + throws Exception { final QueryContext context = environment.getContext(); final String urnStr = environment.getArgument("urn"); - final UpdateViewInput input = bindArgument(environment.getArgument("input"), UpdateViewInput.class); + final UpdateViewInput input = + bindArgument(environment.getArgument("input"), UpdateViewInput.class); final Urn urn = Urn.createFromString(urnStr); - return CompletableFuture.supplyAsync(() -> { - try { - if (ViewUtils.canUpdateView(_viewService, urn, context)) { - _viewService.updateView( - urn, - input.getName(), - input.getDescription(), - ViewUtils.mapDefinition(input.getDefinition()), - context.getAuthentication(), - System.currentTimeMillis()); - log.info(String.format("Successfully updated View %s with urn", urn)); - return getView(urn, context.getAuthentication()); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } catch (AuthorizationException e) { - throw e; - } catch (Exception e) { - throw new RuntimeException(String.format("Failed to perform update against View with urn %s", urn), e); - } - }); + return CompletableFuture.supplyAsync( + () -> { + try { + if (ViewUtils.canUpdateView(_viewService, urn, context)) { + _viewService.updateView( + urn, + input.getName(), + input.getDescription(), + ViewUtils.mapDefinition(input.getDefinition()), + context.getAuthentication(), + System.currentTimeMillis()); + log.info(String.format("Successfully updated View %s with urn", urn)); + return getView(urn, context.getAuthentication()); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } catch (AuthorizationException e) { + throw e; + } catch (Exception e) { + throw new RuntimeException( + String.format("Failed to perform update against View with urn %s", urn), e); + } + }); } - private DataHubView getView(@Nonnull final Urn urn, @Nonnull final Authentication authentication) { + private DataHubView getView( + @Nonnull final Urn urn, @Nonnull final Authentication authentication) { final EntityResponse maybeResponse = _viewService.getViewEntityResponse(urn, authentication); // If there is no response, there is a problem. if (maybeResponse == null) { throw new RuntimeException( - String.format("Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); + String.format( + "Failed to perform update to View with urn %s. Failed to find view in GMS.", urn)); } return DataHubViewMapper.map(maybeResponse); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java index dda0c3bebc2ebe..9da5f915ff31d6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtils.java @@ -26,39 +26,40 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class ViewUtils { /** * Returns true if the authenticated actor is allowed to create a view with the given parameters. * - * The user can create a View if it's a personal View specific to them, or - * if it's a Global view and they have the correct Platform privileges. + *

The user can create a View if it's a personal View specific to them, or if it's a Global + * view and they have the correct Platform privileges. * * @param type the type of the new View * @param context the current GraphQL {@link QueryContext} - * @return true if the authenticator actor is allowed to change or delete the view, false otherwise. + * @return true if the authenticator actor is allowed to change or delete the view, false + * otherwise. */ public static boolean canCreateView( - @Nonnull DataHubViewType type, - @Nonnull QueryContext context) { + @Nonnull DataHubViewType type, @Nonnull QueryContext context) { Objects.requireNonNull(type, "type must not be null"); Objects.requireNonNull(context, "context must not be null"); return DataHubViewType.PERSONAL.equals(type) - || (DataHubViewType.GLOBAL.equals(type) && AuthorizationUtils.canManageGlobalViews(context)); + || (DataHubViewType.GLOBAL.equals(type) + && AuthorizationUtils.canManageGlobalViews(context)); } - /** - * Returns true if the authenticated actor is allowed to update or delete - * the View with the specified urn. + * Returns true if the authenticated actor is allowed to update or delete the View with the + * specified urn. * * @param viewService an instance of {@link ViewService} * @param viewUrn the urn of the View * @param context the current GraphQL {@link QueryContext} - * @return true if the authenticator actor is allowed to change or delete the view, false otherwise. + * @return true if the authenticator actor is allowed to change or delete the view, false + * otherwise. 
*/ - public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) { + public static boolean canUpdateView( + @Nonnull ViewService viewService, @Nonnull Urn viewUrn, @Nonnull QueryContext context) { Objects.requireNonNull(viewService, "viewService must not be null"); Objects.requireNonNull(viewUrn, "viewUrn must not be null"); Objects.requireNonNull(context, "context must not be null"); @@ -67,16 +68,21 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U final DataHubViewInfo viewInfo = viewService.getViewInfo(viewUrn, context.getAuthentication()); if (viewInfo == null) { - throw new IllegalArgumentException(String.format("Failed to modify View. View with urn %s does not exist.", viewUrn)); + throw new IllegalArgumentException( + String.format("Failed to modify View. View with urn %s does not exist.", viewUrn)); } - // If the View is Global, then the user must have ability to manage global views OR must be its owner - if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) && AuthorizationUtils.canManageGlobalViews(context)) { + // If the View is Global, then the user must have ability to manage global views OR must be its + // owner + if (DataHubViewType.GLOBAL.equals(viewInfo.getType()) + && AuthorizationUtils.canManageGlobalViews(context)) { return true; } // If the View is Personal, then the current actor must be the owner. 
- return isViewOwner(viewInfo.getCreated().getActor(), UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); + return isViewOwner( + viewInfo.getCreated().getActor(), + UrnUtils.getUrn(context.getAuthentication().getActor().toUrnStr())); } /** @@ -86,28 +92,32 @@ public static boolean canUpdateView(@Nonnull ViewService viewService, @Nonnull U * @return the GMS model */ @Nonnull - public static DataHubViewDefinition mapDefinition(@Nonnull final DataHubViewDefinitionInput input) { + public static DataHubViewDefinition mapDefinition( + @Nonnull final DataHubViewDefinitionInput input) { Objects.requireNonNull(input, "input must not be null"); final DataHubViewDefinition result = new DataHubViewDefinition(); if (input.getFilter() != null) { result.setFilter(mapFilter(input.getFilter()), SetMode.IGNORE_NULL); } - result.setEntityTypes(new StringArray(input.getEntityTypes().stream().map(EntityTypeMapper::getName).collect( - Collectors.toList()))); + result.setEntityTypes( + new StringArray( + input.getEntityTypes().stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()))); return result; } /** - * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} object, - * which is then persisted to the backend in an aspect. + * Converts an instance of {@link DataHubViewFilterInput} into the corresponding {@link Filter} + * object, which is then persisted to the backend in an aspect. * - * We intentionally convert from a more rigid model to something more flexible to hedge for the case - * in which the views feature evolves to require more advanced filter capabilities. + *

We intentionally convert from a more rigid model to something more flexible to hedge for the + * case in which the views feature evolves to require more advanced filter capabilities. * - * The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), which cannot be - * rendered in full by the UI. We account for this on the read path by logging a warning and returning an empty - * View in such cases. + *

The risk we run is that people ingest Views through the Rest.li ingestion APIs (back door), + * which cannot be rendered in full by the UI. We account for this on the read path by logging a + * warning and returning an empty View in such cases. */ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { if (LogicalOperator.AND.equals(input.getOperator())) { @@ -121,19 +131,30 @@ private static Filter mapFilter(@Nonnull DataHubViewFilterInput input) { private static Filter buildAndFilter(@Nonnull List input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(input.stream().map(ResolverUtils::criterionFromFilter).collect(Collectors.toList())))) - )); + result.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + input.stream() + .map(ResolverUtils::criterionFromFilter) + .collect(Collectors.toList())))))); return result; } private static Filter buildOrFilter(@Nonnull List input) { final Filter result = new Filter(); - result.setOr(new ConjunctiveCriterionArray(input.stream().map(filter -> - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(ResolverUtils.criterionFromFilter(filter)))) - ) - .collect(Collectors.toList()))); + result.setOr( + new ConjunctiveCriterionArray( + input.stream() + .map( + filter -> + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of(ResolverUtils.criterionFromFilter(filter))))) + .collect(Collectors.toList()))); return result; } @@ -141,6 +162,5 @@ private static boolean isViewOwner(Urn creatorUrn, Urn actorUrn) { return creatorUrn.equals(actorUrn); } - private ViewUtils() { } - + private ViewUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java index 
51fd503fff5784..49c8c24c2b6be2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/scalar/LongScalarType.java @@ -1,4 +1,3 @@ package com.linkedin.datahub.graphql.scalar; -public class LongScalarType { -} +public class LongScalarType {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java index 3bd8719a37abc4..df7c729cb14c1d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BatchMutableType.java @@ -1,16 +1,18 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; public interface BatchMutableType extends MutableType { - default Class batchInputClass() throws UnsupportedOperationException { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchInputClass method"); - } + default Class batchInputClass() throws UnsupportedOperationException { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchInputClass method"); + } - default List batchUpdate(@Nonnull final B[] updateInput, QueryContext context) throws Exception { - throw new UnsupportedOperationException(this.getClass().getName() + " does not implement batchUpdate method"); - } + default List batchUpdate(@Nonnull final B[] updateInput, QueryContext context) + throws Exception { + throw new UnsupportedOperationException( + this.getClass().getName() + " does not implement batchUpdate method"); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java index b50a229be0633b..368c126131af26 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/BrowsableEntityType.java @@ -5,42 +5,46 @@ import com.linkedin.datahub.graphql.generated.BrowseResults; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.FacetFilterInput; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** * Extension of {@link EntityType} containing methods required for 'browse' functionality. * - * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. * @param the key type for the DataLoader */ public interface BrowsableEntityType extends EntityType { - /** - * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count. - * - * @param path the path to find browse results under - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Nonnull - BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & count. - * - * @param urn the entity urn to fetch browse paths for - * @param context the {@link QueryContext} corresponding to the request. 
- */ - @Nonnull - List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception; + /** + * Retrieves {@link BrowseResults} corresponding to a given path, list of filters, start, & count. + * + * @param path the path to find browse results under + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param start the initial offset of returned results + * @param count the number of results to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + @Nonnull + BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception; + /** + * Retrieves a list of {@link BrowsePath} corresponding to a given path, list of filters, start, & + * count. + * + * @param urn the entity urn to fetch browse paths for + * @param context the {@link QueryContext} corresponding to the request. + */ + @Nonnull + List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java index 4185288776c065..43e4c1be55b71c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/EntityType.java @@ -3,20 +3,20 @@ import com.linkedin.datahub.graphql.generated.Entity; import java.util.function.Function; - /** - * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, etc.). + * GQL graph type representing a top-level GMS entity (eg. Dataset, User, DataPlatform, Chart, + * etc.). 
* * @param : The GraphQL object type corresponding to the entity, must be of type {@link Entity} * @param the key type for the DataLoader */ public interface EntityType extends LoadableType { - /** - * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the Graph type, eg. 'DATASET' - */ - com.linkedin.datahub.graphql.generated.EntityType type(); - - Function getKeyProvider(); + /** + * Retrieves the {@link com.linkedin.datahub.graphql.generated.EntityType} associated with the + * Graph type, eg. 'DATASET' + */ + com.linkedin.datahub.graphql.generated.EntityType type(); + Function getKeyProvider(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java index a21fab09b79c36..9f9fe1f28994c4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/LoadableType.java @@ -2,10 +2,9 @@ import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; - import graphql.execution.DataFetcherResult; -import javax.annotation.Nonnull; import java.util.List; +import javax.annotation.Nonnull; /** * GQL graph type that can be loaded from a downstream service by primary key. @@ -15,35 +14,38 @@ */ public interface LoadableType { - /** - * Returns generated GraphQL class associated with the type - */ - Class objectClass(); - - /** - * Returns the name of the type, to be used in creating a corresponding GraphQL {@link org.dataloader.DataLoader} - */ - default String name() { - return objectClass().getSimpleName(); - } - - /** - * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity cannot be found. - * - * @param key to retrieve - * @param context the {@link QueryContext} corresponding to the request. 
- */ - default DataFetcherResult load(@Nonnull final K key, @Nonnull final QueryContext context) throws Exception { - return batchLoad(ImmutableList.of(key), context).get(0); - }; - - /** - * Retrieves an list of entities given a list of urn strings. The list returned is expected to - * be of same length of the list of urns, where nulls are provided in place of an entity object if an entity cannot be found. - * - * @param keys to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - List> batchLoad(@Nonnull final List keys, @Nonnull final QueryContext context) throws Exception; - + /** Returns generated GraphQL class associated with the type */ + Class objectClass(); + + /** + * Returns the name of the type, to be used in creating a corresponding GraphQL {@link + * org.dataloader.DataLoader} + */ + default String name() { + return objectClass().getSimpleName(); + } + + /** + * Retrieves an entity by urn string. Null is provided in place of an entity object if an entity + * cannot be found. + * + * @param key to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + default DataFetcherResult load(@Nonnull final K key, @Nonnull final QueryContext context) + throws Exception { + return batchLoad(ImmutableList.of(key), context).get(0); + } + ; + + /** + * Retrieves an list of entities given a list of urn strings. The list returned is expected to be + * of same length of the list of urns, where nulls are provided in place of an entity object if an + * entity cannot be found. + * + * @param keys to retrieve + * @param context the {@link QueryContext} corresponding to the request. 
+ */ + List> batchLoad( + @Nonnull final List keys, @Nonnull final QueryContext context) throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java index 94f1200d3a7833..fa241929133241 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/MutableType.java @@ -1,7 +1,6 @@ package com.linkedin.datahub.graphql.types; import com.linkedin.datahub.graphql.QueryContext; - import javax.annotation.Nonnull; /** @@ -10,18 +9,16 @@ * @param : The input type corresponding to the write. */ public interface MutableType { - /** - * Returns generated GraphQL class associated with the input type - */ - - Class inputClass(); + /** Returns generated GraphQL class associated with the input type */ + Class inputClass(); - /** - * Update an entity by urn - * - * @param urn - * @param input input type - * @param context the {@link QueryContext} corresponding to the request. - */ - T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) throws Exception; + /** + * Update an entity by urn + * + * @param urn + * @param input input type + * @param context the {@link QueryContext} corresponding to the request. 
+ */ + T update(@Nonnull final String urn, @Nonnull final I input, @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java index 96875956d22c10..a5ade054e71ebb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/SearchableEntityType.java @@ -6,52 +6,61 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.metadata.query.filter.Filter; - +import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.List; /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. * - * Extension of {@link EntityType} containing methods required for 'search' functionality. + *

Extension of {@link EntityType} containing methods required for 'search' functionality. * - * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` interface. + * @param : The GraphQL object type corresponding to the entity, must extend the `Entity` + * interface. */ @Deprecated public interface SearchableEntityType extends EntityType { - /** - * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL resolver. - * - * Retrieves {@link SearchResults} corresponding to a given query string, list of filters, start index, & count. - * - * @param query query text - * @param filters list of filters that should be applied to search results, null if non were provided - * @param start the initial offset of returned results - * @param count the number of results to retrieve - * @param context the {@link QueryContext} corresponding to the request. - */ - @Deprecated - SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception; - - /** - * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of filters, & limit. - * - * @param query query text - * @param field the name of the field to autocomplete against, null if one was not provided - * @param filters list of filters that should be applied to search results, null if non were provided - * @param limit the maximum number of autocomplete suggestions to be returned - * @param context the {@link QueryContext} corresponding to the request. - */ - AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception; + /** + * Deprecated - this is no longer used in favor of the search and searchAcrossEntities GraphQL + * resolver. + * + *

Retrieves {@link SearchResults} corresponding to a given query string, list of filters, + * start index, & count. + * + * @param query query text + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param start the initial offset of returned results + * @param count the number of results to retrieve + * @param context the {@link QueryContext} corresponding to the request. + */ + @Deprecated + SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception; + /** + * Retrieves {@link AutoCompleteResults} corresponding to a given query string, field, list of + * filters, & limit. + * + * @param query query text + * @param field the name of the field to autocomplete against, null if one was not provided + * @param filters list of filters that should be applied to search results, null if non were + * provided + * @param limit the maximum number of autocomplete suggestions to be returned + * @param context the {@link QueryContext} corresponding to the request. 
+ */ + AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java index c9e2c322ace8df..00e9badf5e3456 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectMapper.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - public class AspectMapper { public static final AspectMapper INSTANCE = new AspectMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java index f3fdfdaa86f9e3..45e80822b12c8c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/aspect/AspectType.java @@ -2,8 +2,8 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.VersionedAspectKey; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.VersionedAspectKey; import com.linkedin.datahub.graphql.generated.Aspect; import com.linkedin.datahub.graphql.types.LoadableType; import com.linkedin.entity.EntityResponse; @@ -35,45 +35,55 @@ public String name() { } /** - * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list returned is expected to - * be of same length of the list of keys, where nulls are provided in place of an aspect object if an entity cannot be found. 
+ * Retrieves an list of aspects given a list of {@link VersionedAspectKey} structs. The list + * returned is expected to be of same length of the list of keys, where nulls are provided in + * place of an aspect object if an entity cannot be found. * * @param keys to retrieve * @param context the {@link QueryContext} corresponding to the request. */ - public List> batchLoad(@Nonnull List keys, @Nonnull QueryContext context) throws Exception { + public List> batchLoad( + @Nonnull List keys, @Nonnull QueryContext context) throws Exception { try { - return keys.stream().map(key -> { - try { - Urn entityUrn = Urn.createFromString(key.getUrn()); + return keys.stream() + .map( + key -> { + try { + Urn entityUrn = Urn.createFromString(key.getUrn()); - Map response = _entityClient.batchGetV2( - entityUrn.getEntityType(), - ImmutableSet.of(entityUrn), - ImmutableSet.of(key.getAspectName()), - context.getAuthentication() - ); + Map response = + _entityClient.batchGetV2( + entityUrn.getEntityType(), + ImmutableSet.of(entityUrn), + ImmutableSet.of(key.getAspectName()), + context.getAuthentication()); - EntityResponse entityResponse = response.get(entityUrn); + EntityResponse entityResponse = response.get(entityUrn); - if (entityResponse == null || entityResponse.getAspects().get(key.getAspectName()) == null) { - // The aspect was not found. Return null. 
- return DataFetcherResult.newResult().data(null).build(); - } - final EnvelopedAspect aspect = entityResponse.getAspects().get(key.getAspectName()); - return DataFetcherResult.newResult().data(AspectMapper.map(aspect, entityUrn)).build(); - } catch (Exception e) { - if (e instanceof RestLiResponseException) { - // if no aspect is found, restli will return a 404 rather than null - // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls - if (((RestLiResponseException) e).getStatus() == 404) { - return DataFetcherResult.newResult().data(null).build(); - } - } - throw new RuntimeException(String.format("Failed to load Aspect for entity %s", key.getUrn()), e); - } - }).collect(Collectors.toList()); + if (entityResponse == null + || entityResponse.getAspects().get(key.getAspectName()) == null) { + // The aspect was not found. Return null. + return DataFetcherResult.newResult().data(null).build(); + } + final EnvelopedAspect aspect = + entityResponse.getAspects().get(key.getAspectName()); + return DataFetcherResult.newResult() + .data(AspectMapper.map(aspect, entityUrn)) + .build(); + } catch (Exception e) { + if (e instanceof RestLiResponseException) { + // if no aspect is found, restli will return a 404 rather than null + // https://linkedin.github.io/rest.li/user_guide/restli_server#returning-nulls + if (((RestLiResponseException) e).getStatus() == 404) { + return DataFetcherResult.newResult().data(null).build(); + } + } + throw new RuntimeException( + String.format("Failed to load Aspect for entity %s", key.getUrn()), e); + } + }) + .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Aspects", e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java index e1d81bb31f4712..2536f4d2521ee0 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionMapper.java @@ -25,7 +25,6 @@ import java.util.Collections; import java.util.stream.Collectors; - public class AssertionMapper { public static Assertion map(final EntityResponse entityResponse) { @@ -36,15 +35,18 @@ public static Assertion map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.ASSERTION); - final EnvelopedAspect envelopedAssertionInfo = aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); + final EnvelopedAspect envelopedAssertionInfo = + aspects.get(Constants.ASSERTION_INFO_ASPECT_NAME); if (envelopedAssertionInfo != null) { result.setInfo(mapAssertionInfo(new AssertionInfo(envelopedAssertionInfo.getValue().data()))); } - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(Constants.UNKNOWN_DATA_PLATFORM); @@ -60,7 +62,8 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion new com.linkedin.datahub.graphql.generated.AssertionInfo(); assertionInfo.setType(AssertionType.valueOf(gmsAssertionInfo.getType().name())); if (gmsAssertionInfo.hasDatasetAssertion()) { - DatasetAssertionInfo datasetAssertion = 
mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); + DatasetAssertionInfo datasetAssertion = + mapDatasetAssertionInfo(gmsAssertionInfo.getDatasetAssertion()); assertionInfo.setDatasetAssertion(datasetAssertion); } return assertionInfo; @@ -69,25 +72,25 @@ private static com.linkedin.datahub.graphql.generated.AssertionInfo mapAssertion private static DatasetAssertionInfo mapDatasetAssertionInfo( final com.linkedin.assertion.DatasetAssertionInfo gmsDatasetAssertion) { DatasetAssertionInfo datasetAssertion = new DatasetAssertionInfo(); - datasetAssertion.setDatasetUrn( - gmsDatasetAssertion.getDataset().toString()); - datasetAssertion.setScope( - DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); + datasetAssertion.setDatasetUrn(gmsDatasetAssertion.getDataset().toString()); + datasetAssertion.setScope(DatasetAssertionScope.valueOf(gmsDatasetAssertion.getScope().name())); if (gmsDatasetAssertion.hasFields()) { - datasetAssertion.setFields(gmsDatasetAssertion.getFields() - .stream() - .map(AssertionMapper::mapDatasetSchemaField) - .collect(Collectors.toList())); + datasetAssertion.setFields( + gmsDatasetAssertion.getFields().stream() + .map(AssertionMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } else { datasetAssertion.setFields(Collections.emptyList()); } // Agg if (gmsDatasetAssertion.hasAggregation()) { - datasetAssertion.setAggregation(AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); + datasetAssertion.setAggregation( + AssertionStdAggregation.valueOf(gmsDatasetAssertion.getAggregation().name())); } // Op - datasetAssertion.setOperator(AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); + datasetAssertion.setOperator( + AssertionStdOperator.valueOf(gmsDatasetAssertion.getOperator().name())); // Params if (gmsDatasetAssertion.hasParameters()) { @@ -98,7 +101,8 @@ private static DatasetAssertionInfo mapDatasetAssertionInfo( 
datasetAssertion.setNativeType(gmsDatasetAssertion.getNativeType()); } if (gmsDatasetAssertion.hasNativeParameters()) { - datasetAssertion.setNativeParameters(StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); + datasetAssertion.setNativeParameters( + StringMapMapper.map(gmsDatasetAssertion.getNativeParameters())); } else { datasetAssertion.setNativeParameters(Collections.emptyList()); } @@ -119,7 +123,8 @@ private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { return new SchemaFieldRef(schemaFieldUrn.toString(), schemaFieldUrn.getEntityKey().get(1)); } - private static AssertionStdParameters mapParameters(final com.linkedin.assertion.AssertionStdParameters params) { + private static AssertionStdParameters mapParameters( + final com.linkedin.assertion.AssertionStdParameters params) { final AssertionStdParameters result = new AssertionStdParameters(); if (params.hasValue()) { result.setValue(mapParameter(params.getValue())); @@ -133,13 +138,13 @@ private static AssertionStdParameters mapParameters(final com.linkedin.assertion return result; } - private static AssertionStdParameter mapParameter(final com.linkedin.assertion.AssertionStdParameter param) { + private static AssertionStdParameter mapParameter( + final com.linkedin.assertion.AssertionStdParameter param) { final AssertionStdParameter result = new AssertionStdParameter(); result.setType(AssertionStdParameterType.valueOf(param.getType().name())); result.setValue(param.getValue()); return result; } - private AssertionMapper() { - } + private AssertionMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java index 3493afdd8bd841..ac5cce1191e5dd 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/assertion/AssertionType.java @@ -20,69 +20,71 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AssertionType + implements com.linkedin.datahub.graphql.types.EntityType { -public class AssertionType implements com.linkedin.datahub.graphql.types.EntityType { + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ASSERTION_KEY_ASPECT_NAME, + Constants.ASSERTION_INFO_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + private final EntityClient _entityClient; - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ASSERTION_KEY_ASPECT_NAME, - Constants.ASSERTION_INFO_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME - ); - private final EntityClient _entityClient; + public AssertionType(final EntityClient entityClient) { + _entityClient = entityClient; + } - public AssertionType(final EntityClient entityClient) { - _entityClient = entityClient; - } + @Override + public EntityType type() { + return EntityType.ASSERTION; + } - @Override - public EntityType type() { - return EntityType.ASSERTION; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return Assertion.class; - } + @Override + public Class objectClass() { + return Assertion.class; + } - @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List assertionUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List assertionUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); - try { - final Map entities = _entityClient.batchGetV2( - Constants.ASSERTION_ENTITY_NAME, - new HashSet<>(assertionUrns), - 
ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map entities = + _entityClient.batchGetV2( + Constants.ASSERTION_ENTITY_NAME, + new HashSet<>(assertionUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List gmsResults = new ArrayList<>(); - for (Urn urn : assertionUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(AssertionMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Assertions", e); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : assertionUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(AssertionMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Assertions", e); } + } - private Urn getUrn(final String urnStr) { - try { - return Urn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); - } + private Urn getUrn(final String urnStr) { + try { + return Urn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } -} \ No newline at end of file + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java index d9f25a7cec8e1b..bfe2ccbe34166d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/AccessTokenMetadataType.java @@ -4,9 +4,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; import com.linkedin.datahub.graphql.types.auth.mappers.AccessTokenMetadataMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; @@ -21,7 +21,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class AccessTokenMetadataType implements com.linkedin.datahub.graphql.types.EntityType { @@ -48,13 +47,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List keys, - @Nonnull QueryContext context) throws Exception { - final List tokenInfoUrns = keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List keys, @Nonnull QueryContext context) throws Exception { + final List tokenInfoUrns = + keys.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(Constants.ACCESS_TOKEN_ENTITY_NAME, new HashSet<>(tokenInfoUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + Constants.ACCESS_TOKEN_ENTITY_NAME, + new HashSet<>(tokenInfoUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -62,9 +65,13 @@ public List> batchLoad(@Nonnull List gmsResult == null ? null : DataFetcherResult.newResult() - .data(AccessTokenMetadataMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(AccessTokenMetadataMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Access Token Info", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java index 9b387578798967..a519a65e5cb6b8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/auth/mappers/AccessTokenMetadataMapper.java @@ -2,8 +2,8 @@ import com.linkedin.access.token.DataHubAccessTokenInfo; import com.linkedin.data.DataMap; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AccessTokenMetadata; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.entity.EntityResponse; @@ -11,7 +11,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nonnull; - public class AccessTokenMetadataMapper implements ModelMapper { public static final AccessTokenMetadataMapper INSTANCE = new AccessTokenMetadataMapper(); @@ -29,13 +28,15 @@ public AccessTokenMetadata apply(@Nonnull final EntityResponse input) { metadata.setType(EntityType.ACCESS_TOKEN); final EnvelopedAspectMap aspectMap = input.getAspects(); - final MappingHelper mappingHelper = new MappingHelper<>(aspectMap, metadata); + final MappingHelper mappingHelper = + new MappingHelper<>(aspectMap, metadata); mappingHelper.mapToResult(Constants.ACCESS_TOKEN_INFO_NAME, this::mapTokenInfo); return mappingHelper.getResult(); } - private void mapTokenInfo(@Nonnull final 
AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { + private void mapTokenInfo( + @Nonnull final AccessTokenMetadata accessTokenMetadata, @Nonnull final DataMap dataMap) { final DataHubAccessTokenInfo tokenInfo = new DataHubAccessTokenInfo(dataMap); accessTokenMetadata.setName(tokenInfo.getName()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index fa0e3cd8568033..ba8e96159b0bf5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.chart; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.ChartUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -36,8 +39,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import 
com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,203 +57,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - +public class ChartType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { -public class ChartType implements SearchableEntityType, BrowsableEntityType, MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + CHART_KEY_ASPECT_NAME, + CHART_INFO_ASPECT_NAME, + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, + CHART_QUERY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set FACET_FIELDS = + ImmutableSet.of("access", "queryType", "tool", "type"); - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - CHART_KEY_ASPECT_NAME, - CHART_INFO_ASPECT_NAME, - EDITABLE_CHART_PROPERTIES_ASPECT_NAME, - CHART_QUERY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("access", "queryType", "tool", "type"); - - private final EntityClient _entityClient; - - public ChartType(final 
EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class inputClass() { - return ChartUpdateInput.class; - } + public ChartType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.CHART; - } + @Override + public Class inputClass() { + return ChartUpdateInput.class; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.CHART; + } - @Override - public Class objectClass() { - return Chart.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map chartMap = - _entityClient.batchGetV2( - CHART_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); + @Override + public Class objectClass() { + return Chart.class; + } - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(chartMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsChart -> gmsChart == null ? 
null : DataFetcherResult.newResult() - .data(ChartMapper.map(gmsChart)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Charts", e); - } - } + @Override + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map chartMap = + _entityClient.batchGetV2( + CHART_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "chart", - query, - facetFilters, - start, - count, - context.getAuthentication(), - new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(chartMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsChart -> + gmsChart == null + ? 
null + : DataFetcherResult.newResult() + .data(ChartMapper.map(gmsChart)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Charts", e); } + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "chart", query, - filters, - limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "chart", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("chart", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "chart", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - private ChartUrn getChartUrn(String urnStr) { - try { - return ChartUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); - } - } + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getChartUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - @Override - public Chart update(@Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = ChartUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + private ChartUrn getChartUrn(String urnStr) { + try { + return ChartUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve chart with urn %s, invalid urn", urnStr)); + } + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public Chart update( + @Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection 
proposals = ChartUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } - private boolean isAuthorized(@Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CHART_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull ChartUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CHART_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final ChartUpdateInput updateInput) { - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java index e0ffc57ddf5194..0ef52c9f457168 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -28,13 +30,13 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -51,184 +53,211 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartMapper implements ModelMapper { + public static final ChartMapper INSTANCE = new ChartMapper(); -public class ChartMapper implements ModelMapper { + public static Chart map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - public static final ChartMapper INSTANCE = new ChartMapper(); + @Override + public Chart apply(@Nonnull final EntityResponse entityResponse) { + final Chart result = new Chart(); + Urn entityUrn = entityResponse.getUrn(); - public static Chart map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CHART); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - @Override - public Chart apply(@Nonnull final EntityResponse entityResponse) { - final Chart result = new Chart(); - Urn entityUrn = entityResponse.getUrn(); - - 
result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CHART); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); - mappingHelper.mapToResult(CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); - mappingHelper.mapToResult(EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (chart, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CHART_KEY_ASPECT_NAME, this::mapChartKey); + mappingHelper.mapToResult( + CHART_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapChartInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult(CHART_QUERY_ASPECT_NAME, this::mapChartQuery); + mappingHelper.mapToResult( + EDITABLE_CHART_PROPERTIES_ASPECT_NAME, this::mapEditableChartProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (chart, dataMap) -> chart.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (chart, dataMap) -> - chart.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (chart, dataMap) -> - chart.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (chart, dataMap) -> 
chart.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (chart, dataMap) -> + chart.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (chart, dataMap) -> chart.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (chart, dataMap) -> - chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (chart, dataMap) -> chart.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (chart, dataMap) -> chart.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (chart, dataMap) -> - chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (chart, dataMap) -> + mappingHelper.mapToResult( + 
EMBED_ASPECT_NAME, (chart, dataMap) -> chart.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (chart, dataMap) -> chart.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } - private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final ChartKey gmsKey = new ChartKey(dataMap); - chart.setChartId(gmsKey.getChartId()); - chart.setTool(gmsKey.getDashboardTool()); - chart.setPlatform(DataPlatform.builder() + private void mapChartKey(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final ChartKey gmsKey = new ChartKey(dataMap); + chart.setChartId(gmsKey.getChartId()); + chart.setTool(gmsKey.getDashboardTool()); + chart.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } - private void mapChartInfo(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final com.linkedin.chart.ChartInfo gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); - chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); - chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); - } + private void mapChartInfo( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final com.linkedin.chart.ChartInfo 
gmsChartInfo = new com.linkedin.chart.ChartInfo(dataMap); + chart.setInfo(mapInfo(gmsChartInfo, entityUrn)); + chart.setProperties(mapChartInfoToProperties(gmsChartInfo, entityUrn)); + } - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} - */ - private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartInfo result = new ChartInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasInputs()) { - result.setInputs(info.getInputs().stream().map(input -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(input.getDatasetUrn().toString()); - return dataset; - }).collect(Collectors.toList())); - } - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; - } + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to deprecated GraphQL {@link ChartInfo} */ + private ChartInfo mapInfo(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartInfo result = new ChartInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - /** - * Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} - */ - private ChartProperties mapChartInfoToProperties(final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { - final ChartProperties result = new ChartProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - if (info.hasType()) { - result.setType(ChartType.valueOf(info.getType().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasChartUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getChartUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasInputs()) { + result.setInputs( + info.getInputs().stream() + .map( + input -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(input.getDatasetUrn().toString()); + return dataset; + }) + .collect(Collectors.toList())); } - private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); - chart.setQuery(mapQuery(gmsChartQuery)); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { - final ChartQuery result = new ChartQuery(); - result.setRawQuery(query.getRawQuery()); - result.setType(ChartQueryType.valueOf(query.getType().toString())); - return result; + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); } - - private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); - final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); - chartEditableProperties.setDescription(editableChartProperties.getDescription()); - chart.setEditableProperties(chartEditableProperties); + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the 
External URL field for consistency. + result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + /** Maps GMS {@link com.linkedin.chart.ChartInfo} to new GraphQL {@link ChartProperties} */ + private ChartProperties mapChartInfoToProperties( + final com.linkedin.chart.ChartInfo info, @Nonnull Urn entityUrn) { + final ChartProperties result = new ChartProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); - private void mapGlobalTags(@Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - chart.setGlobalTags(globalTags); - chart.setTags(globalTags); + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + if (info.hasType()) { + result.setType(ChartType.valueOf(info.getType().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasChartUrl()) { + // TODO: Migrate to using the External URL field for consistency. 
+ result.setExternalUrl(info.getChartUrl().toString()); } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private void mapChartQuery(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.chart.ChartQuery gmsChartQuery = new com.linkedin.chart.ChartQuery(dataMap); + chart.setQuery(mapQuery(gmsChartQuery)); + } + + private ChartQuery mapQuery(final com.linkedin.chart.ChartQuery query) { + final ChartQuery result = new ChartQuery(); + result.setRawQuery(query.getRawQuery()); + result.setType(ChartQueryType.valueOf(query.getType().toString())); + return result; + } + + private void mapEditableChartProperties(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(dataMap); + final ChartEditableProperties chartEditableProperties = new ChartEditableProperties(); + chartEditableProperties.setDescription(editableChartProperties.getDescription()); + chart.setEditableProperties(chartEditableProperties); + } - private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - chart.setContainer(Container - .builder() + private void mapGlobalTags( + @Nonnull Chart chart, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + chart.setGlobalTags(globalTags); + chart.setTags(globalTags); + } + + private void mapContainers(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + chart.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void 
mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); - } + private void mapDomains(@Nonnull Chart chart, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + chart.setDomain(DomainAssociationMapper.map(domains, chart.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java index b52ddad0b0071e..f2a434b58686c5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/ChartUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.chart.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.chart.EditableChartProperties; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -17,68 +19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class ChartUpdateInputMapper + implements InputModelMapper, Urn> { + public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); -public class ChartUpdateInputMapper implements InputModelMapper, Urn> { + public static Collection map( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(chartUpdateInput, actor); + } - public static final ChartUpdateInputMapper INSTANCE = new ChartUpdateInputMapper(); + @Override + public Collection apply( + @Nonnull final ChartUpdateInput chartUpdateInput, @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final AuditStamp auditStamp = new 
AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - public static Collection map(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(chartUpdateInput, actor); + if (chartUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } - @Override - public Collection apply(@Nonnull final ChartUpdateInput chartUpdateInput, - @Nonnull final Urn actor) { - final Collection proposals = new ArrayList<>(3); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(CHART_ENTITY_NAME); - - if (chartUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper - .aspectToProposal(OwnershipUpdateMapper.map(chartUpdateInput.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); - } - - if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (chartUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - // Tags overrides global tags if provided - if (chartUpdateInput.getTags() != null) { - globalTags.setTags( - new TagAssociationArray( - chartUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } - - if 
(chartUpdateInput.getEditableProperties() != null) { - final EditableChartProperties editableChartProperties = new EditableChartProperties(); - editableChartProperties.setDescription(chartUpdateInput.getEditableProperties().getDescription()); - if (!editableChartProperties.hasCreated()) { - editableChartProperties.setCreated(auditStamp); - } - editableChartProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); - } + if (chartUpdateInput.getTags() != null || chartUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (chartUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + // Tags overrides global tags if provided + if (chartUpdateInput.getTags() != null) { + globalTags.setTags( + new TagAssociationArray( + chartUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (chartUpdateInput.getEditableProperties() != null) { + final EditableChartProperties editableChartProperties = new EditableChartProperties(); + editableChartProperties.setDescription( + chartUpdateInput.getEditableProperties().getDescription()); + if (!editableChartProperties.hasCreated()) { + editableChartProperties.setCreated(auditStamp); + } + editableChartProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableChartProperties, EDITABLE_CHART_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java index d6ef713f3ade6b..4da18403f95cca 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/mappers/InputFieldsMapper.java @@ -7,29 +7,36 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class InputFieldsMapper { - public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); - - public static com.linkedin.datahub.graphql.generated.InputFields map(@Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.InputFields apply(@Nonnull final InputFields input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.InputFields result = new com.linkedin.datahub.graphql.generated.InputFields(); - result.setFields(input.getFields().stream().map(field -> { - InputField fieldResult = new InputField(); - - if (field.hasSchemaField()) { - fieldResult.setSchemaField(SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); - } - if (field.hasSchemaFieldUrn()) { - fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); - } - return fieldResult; - }).collect(Collectors.toList())); - - return result; - } + public static final InputFieldsMapper INSTANCE = new InputFieldsMapper(); + + public static com.linkedin.datahub.graphql.generated.InputFields map( + @Nonnull final InputFields metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.InputFields apply( + @Nonnull final InputFields input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.InputFields result = + new com.linkedin.datahub.graphql.generated.InputFields(); + 
result.setFields( + input.getFields().stream() + .map( + field -> { + InputField fieldResult = new InputField(); + + if (field.hasSchemaField()) { + fieldResult.setSchemaField( + SchemaFieldMapper.map(field.getSchemaField(), entityUrn)); + } + if (field.hasSchemaFieldUrn()) { + fieldResult.setSchemaFieldUrn(field.getSchemaFieldUrn().toString()); + } + return fieldResult; + }) + .collect(Collectors.toList())); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java index beb2b64e1dd7d5..1f952bb6a2bd1f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/AuditStampMapper.java @@ -1,29 +1,27 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class AuditStampMapper implements ModelMapper { - public static final AuditStampMapper INSTANCE = new AuditStampMapper(); + public static final AuditStampMapper INSTANCE = new AuditStampMapper(); - public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - return INSTANCE.apply(auditStamp); - } + public static AuditStamp map(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { + return INSTANCE.apply(auditStamp); + } - @Override - public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { - final AuditStamp result = new AuditStamp(); - result.setActor(auditStamp.getActor().toString()); - result.setTime(auditStamp.getTime()); - return result; - } + @Override + public AuditStamp apply(@Nonnull final com.linkedin.common.AuditStamp auditStamp) { + final AuditStamp result = new AuditStamp(); + result.setActor(auditStamp.getActor().toString()); + result.setTime(auditStamp.getTime()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java index 41ee99fa412ad1..79b7cf8e050d32 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/BrowsePathsV2Mapper.java @@ -4,10 +4,9 @@ import com.linkedin.datahub.graphql.generated.BrowsePathEntry; import com.linkedin.datahub.graphql.generated.BrowsePathV2; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class BrowsePathsV2Mapper implements ModelMapper { @@ -20,7 +19,8 @@ public static BrowsePathV2 
map(@Nonnull final BrowsePathsV2 metadata) { @Override public BrowsePathV2 apply(@Nonnull final BrowsePathsV2 input) { final BrowsePathV2 result = new BrowsePathV2(); - final List path = input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); + final List path = + input.getPath().stream().map(this::mapBrowsePathEntry).collect(Collectors.toList()); result.setPath(path); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java index 7144730ba9337e..e3a09bc8926a30 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/ChangeAuditStampsMapper.java @@ -3,8 +3,8 @@ import com.linkedin.datahub.graphql.generated.ChangeAuditStamps; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -public class ChangeAuditStampsMapper implements ModelMapper { +public class ChangeAuditStampsMapper + implements ModelMapper { public static final ChangeAuditStampsMapper INSTANCE = new ChangeAuditStampsMapper(); public static ChangeAuditStamps map(com.linkedin.common.ChangeAuditStamps input) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java index 6c8bdada17b242..806e8e6aadc5b1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostMapper.java @@ -1,26 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import 
com.linkedin.datahub.graphql.generated.Cost; import com.linkedin.datahub.graphql.generated.CostType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import javax.annotation.Nonnull; import lombok.NonNull; public class CostMapper implements ModelMapper { - public static final CostMapper INSTANCE = new CostMapper(); + public static final CostMapper INSTANCE = new CostMapper(); - public static Cost map(@NonNull final com.linkedin.common.Cost cost) { - return INSTANCE.apply(cost); - } + public static Cost map(@NonNull final com.linkedin.common.Cost cost) { + return INSTANCE.apply(cost); + } - @Override - public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { - final Cost result = new Cost(); - result.setCostType(CostType.valueOf(cost.getCostType().name())); - result.setCostValue(CostValueMapper.map(cost.getCost())); - return result; - } + @Override + public Cost apply(@Nonnull final com.linkedin.common.Cost cost) { + final Cost result = new Cost(); + result.setCostType(CostType.valueOf(cost.getCostType().name())); + result.setCostValue(CostValueMapper.map(cost.getCost())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java index 3f41c92cd17154..56c107f7ec0596 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CostValueMapper.java @@ -2,25 +2,24 @@ import com.linkedin.datahub.graphql.generated.CostValue; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class CostValueMapper implements ModelMapper { - public static final CostValueMapper INSTANCE = new CostValueMapper(); + public static final CostValueMapper INSTANCE = new CostValueMapper(); - 
public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { - return INSTANCE.apply(costValue); - } + public static CostValue map(@NonNull final com.linkedin.common.CostValue costValue) { + return INSTANCE.apply(costValue); + } - @Override - public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { - final CostValue result = new CostValue(); - if (costValue.isCostCode()) { - result.setCostCode(costValue.getCostCode()); - } - if (costValue.isCostId()) { - result.setCostId(costValue.getCostId().floatValue()); - } - return result; + @Override + public CostValue apply(@NonNull final com.linkedin.common.CostValue costValue) { + final CostValue result = new CostValue(); + if (costValue.isCostCode()) { + result.setCostCode(costValue.getCostCode()); + } + if (costValue.isCostId()) { + result.setCostId(costValue.getCostId().floatValue()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java index 50e4846611a9b0..b09678ddeb42ed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/CustomPropertiesMapper.java @@ -1,36 +1,36 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.CustomPropertiesEntry; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.Map; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class CustomPropertiesMapper { - public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); + public static final CustomPropertiesMapper INSTANCE = new CustomPropertiesMapper(); - public static List map(@Nonnull final Map input, @Nonnull Urn urn) { - return INSTANCE.apply(input, urn); - } + public static List map( + @Nonnull final Map input, @Nonnull Urn urn) { + return INSTANCE.apply(input, urn); + } - public List apply(@Nonnull final Map input, @Nonnull Urn urn) { - List results = new ArrayList<>(); - for (String key : input.keySet()) { - final CustomPropertiesEntry entry = new CustomPropertiesEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - entry.setAssociatedUrn(urn.toString()); - results.add(entry); - } - return results; + public List apply( + @Nonnull final Map input, @Nonnull Urn urn) { + List results = new ArrayList<>(); + for (String key : input.keySet()) { + final CustomPropertiesEntry entry = new CustomPropertiesEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + entry.setAssociatedUrn(urn.toString()); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java index 1f10cd6ee3658b..a2236f7e8586d5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DataPlatformInstanceAspectMapper.java @@ -3,14 +3,16 @@ import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import 
javax.annotation.Nonnull; -public class DataPlatformInstanceAspectMapper implements ModelMapper { +public class DataPlatformInstanceAspectMapper + implements ModelMapper { - public static final DataPlatformInstanceAspectMapper INSTANCE = new DataPlatformInstanceAspectMapper(); + public static final DataPlatformInstanceAspectMapper INSTANCE = + new DataPlatformInstanceAspectMapper(); - public static DataPlatformInstance map(@Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { + public static DataPlatformInstance map( + @Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) { return INSTANCE.apply(dataPlatformInstance); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java index 4bbf50bb723629..7a884741669159 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/DeprecationMapper.java @@ -1,24 +1,24 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class DeprecationMapper implements ModelMapper { - public static final DeprecationMapper INSTANCE = new DeprecationMapper(); +public class DeprecationMapper + implements ModelMapper { + public static final DeprecationMapper INSTANCE = new DeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map(@Nonnull final com.linkedin.common.Deprecation deprecation) { + return INSTANCE.apply(deprecation); + } - 
@Override - public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.common.Deprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java index 478d256df66a4b..339c6a848d9f3d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/EmbedMapper.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.graphql.generated.Embed; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class EmbedMapper implements ModelMapper { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java index 9f4517c89a6dc3..830cbb0e79d797 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/FineGrainedLineagesMapper.java @@ -1,44 +1,49 @@ package 
com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.FineGrainedLineage; import com.linkedin.datahub.graphql.generated.SchemaFieldRef; import com.linkedin.dataset.FineGrainedLineageArray; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.SCHEMA_FIELD_ENTITY_NAME; +import javax.annotation.Nonnull; public class FineGrainedLineagesMapper { public static final FineGrainedLineagesMapper INSTANCE = new FineGrainedLineagesMapper(); - public static List map(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { + public static List map( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { return INSTANCE.apply(fineGrainedLineages); } - public List apply(@Nonnull final FineGrainedLineageArray fineGrainedLineages) { - final List result = new ArrayList<>(); + public List apply( + @Nonnull final FineGrainedLineageArray fineGrainedLineages) { + final List result = + new ArrayList<>(); if (fineGrainedLineages.size() == 0) { return result; } for (com.linkedin.dataset.FineGrainedLineage fineGrainedLineage : fineGrainedLineages) { - com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); + com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = + new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); if (fineGrainedLineage.hasUpstreams()) { - resultEntry.setUpstreams(fineGrainedLineage.getUpstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setUpstreams( + fineGrainedLineage.getUpstreams().stream() + .filter(entry -> 
entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } if (fineGrainedLineage.hasDownstreams()) { - resultEntry.setDownstreams(fineGrainedLineage.getDownstreams().stream() - .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) - .map(FineGrainedLineagesMapper::mapDatasetSchemaField).collect( - Collectors.toList())); + resultEntry.setDownstreams( + fineGrainedLineage.getDownstreams().stream() + .filter(entry -> entry.getEntityType().equals(SCHEMA_FIELD_ENTITY_NAME)) + .map(FineGrainedLineagesMapper::mapDatasetSchemaField) + .collect(Collectors.toList())); } result.add(resultEntry); } @@ -46,8 +51,7 @@ public List apply(@No } private static SchemaFieldRef mapDatasetSchemaField(final Urn schemaFieldUrn) { - return new SchemaFieldRef(schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); + return new SchemaFieldRef( + schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); } } - - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java index 8bcfe7eb3b6d00..4546e0e4d8dc00 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMapper.java @@ -2,22 +2,25 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.InstitutionalMemory; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class InstitutionalMemoryMapper { - public static final InstitutionalMemoryMapper INSTANCE = new InstitutionalMemoryMapper(); + public static final InstitutionalMemoryMapper INSTANCE 
= new InstitutionalMemoryMapper(); - public static InstitutionalMemory map(@Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(memory, entityUrn); - } + public static InstitutionalMemory map( + @Nonnull final com.linkedin.common.InstitutionalMemory memory, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(memory, entityUrn); + } - public InstitutionalMemory apply(@Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { - final InstitutionalMemory result = new InstitutionalMemory(); - result.setElements(input.getElements().stream().map(metadata -> - InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)).collect(Collectors.toList())); - return result; - } + public InstitutionalMemory apply( + @Nonnull final com.linkedin.common.InstitutionalMemory input, @Nonnull final Urn entityUrn) { + final InstitutionalMemory result = new InstitutionalMemory(); + result.setElements( + input.getElements().stream() + .map(metadata -> InstitutionalMemoryMetadataMapper.map(metadata, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java index ba4d37173abb83..49a46185070865 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataMapper.java @@ -1,33 +1,37 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import com.linkedin.datahub.graphql.generated.CorpUser; - +import 
com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadata; import javax.annotation.Nonnull; public class InstitutionalMemoryMetadataMapper { - public static final InstitutionalMemoryMetadataMapper INSTANCE = new InstitutionalMemoryMetadataMapper(); + public static final InstitutionalMemoryMetadataMapper INSTANCE = + new InstitutionalMemoryMetadataMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static InstitutionalMemoryMetadata map( + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata metadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public InstitutionalMemoryMetadata apply(@Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, @Nonnull final Urn entityUrn) { - final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); - result.setUrl(input.getUrl().toString()); - result.setDescription(input.getDescription()); // deprecated field - result.setLabel(input.getDescription()); - result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); - result.setCreated(AuditStampMapper.map(input.getCreateStamp())); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + public InstitutionalMemoryMetadata apply( + @Nonnull final com.linkedin.common.InstitutionalMemoryMetadata input, + @Nonnull final Urn entityUrn) { + final InstitutionalMemoryMetadata result = new InstitutionalMemoryMetadata(); + result.setUrl(input.getUrl().toString()); + result.setDescription(input.getDescription()); // deprecated field + result.setLabel(input.getDescription()); + result.setAuthor(getAuthor(input.getCreateStamp().getActor().toString())); + result.setCreated(AuditStampMapper.map(input.getCreateStamp())); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } - private CorpUser 
getAuthor(String actor) { - CorpUser partialUser = new CorpUser(); - partialUser.setUrn(actor); - return partialUser; - } + private CorpUser getAuthor(String actor) { + CorpUser partialUser = new CorpUser(); + partialUser.setUrn(actor); + return partialUser; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java index 28986dcae57251..87d865471708e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryMetadataUpdateMapper.java @@ -1,31 +1,34 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import javax.annotation.Nonnull; - import com.linkedin.common.AuditStamp; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.url.Url; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryMetadataUpdate; import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class InstitutionalMemoryMetadataUpdateMapper implements ModelMapper { +public class InstitutionalMemoryMetadataUpdateMapper + implements ModelMapper { - private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = new InstitutionalMemoryMetadataUpdateMapper(); + private static final InstitutionalMemoryMetadataUpdateMapper INSTANCE = + new InstitutionalMemoryMetadataUpdateMapper(); - public static InstitutionalMemoryMetadata map(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemoryMetadata map( + @Nonnull final InstitutionalMemoryMetadataUpdate input) { 
+ return INSTANCE.apply(input); + } - @Override - public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { - final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); - metadata.setDescription(input.getDescription()); - metadata.setUrl(new Url(input.getUrl())); - metadata.setCreateStamp(new AuditStamp() + @Override + public InstitutionalMemoryMetadata apply(@Nonnull final InstitutionalMemoryMetadataUpdate input) { + final InstitutionalMemoryMetadata metadata = new InstitutionalMemoryMetadata(); + metadata.setDescription(input.getDescription()); + metadata.setUrl(new Url(input.getUrl())); + metadata.setCreateStamp( + new AuditStamp() .setActor(CorpUserUtils.getCorpUserUrn(input.getAuthor())) - .setTime(input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt()) - ); - return metadata; - } + .setTime( + input.getCreatedAt() == null ? System.currentTimeMillis() : input.getCreatedAt())); + return metadata; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java index bf063896290eba..d8b451458e72c7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/InstitutionalMemoryUpdateMapper.java @@ -1,30 +1,30 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - -import javax.annotation.Nonnull; - import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadataArray; import com.linkedin.datahub.graphql.generated.InstitutionalMemoryUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import 
java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class InstitutionalMemoryUpdateMapper implements ModelMapper { +public class InstitutionalMemoryUpdateMapper + implements ModelMapper { - private static final InstitutionalMemoryUpdateMapper INSTANCE = new InstitutionalMemoryUpdateMapper(); + private static final InstitutionalMemoryUpdateMapper INSTANCE = + new InstitutionalMemoryUpdateMapper(); - public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { - return INSTANCE.apply(input); - } + public static InstitutionalMemory map(@Nonnull final InstitutionalMemoryUpdate input) { + return INSTANCE.apply(input); + } - @Override - public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { - final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); - institutionalMemory.setElements(new InstitutionalMemoryMetadataArray( - input.getElements() - .stream() + @Override + public InstitutionalMemory apply(@Nonnull final InstitutionalMemoryUpdate input) { + final InstitutionalMemory institutionalMemory = new InstitutionalMemory(); + institutionalMemory.setElements( + new InstitutionalMemoryMetadataArray( + input.getElements().stream() .map(InstitutionalMemoryMetadataUpdateMapper::map) .collect(Collectors.toList()))); - return institutionalMemory; - } + return institutionalMemory; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java index 986954fab87dbd..37b625715edd5c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OperationMapper.java @@ -1,59 +1,66 @@ package com.linkedin.datahub.graphql.types.common.mappers; import com.linkedin.common.Operation; 
+import com.linkedin.common.urn.Urn; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.OperationSourceType; import com.linkedin.datahub.graphql.generated.OperationType; -import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.types.mappers.TimeSeriesAspectMapper; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.utils.GenericRecordUtils; import java.util.stream.Collectors; import javax.annotation.Nonnull; -public class OperationMapper implements TimeSeriesAspectMapper { +public class OperationMapper + implements TimeSeriesAspectMapper { - public static final OperationMapper INSTANCE = new OperationMapper(); + public static final OperationMapper INSTANCE = new OperationMapper(); - public static com.linkedin.datahub.graphql.generated.Operation map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); - } + public static com.linkedin.datahub.graphql.generated.Operation map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.Operation apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + Operation gmsProfile = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + Operation.class); + + final com.linkedin.datahub.graphql.generated.Operation result = + new com.linkedin.datahub.graphql.generated.Operation(); - @Override - public com.linkedin.datahub.graphql.generated.Operation apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - Operation gmsProfile = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - Operation.class); - - final com.linkedin.datahub.graphql.generated.Operation result = - new com.linkedin.datahub.graphql.generated.Operation(); - - 
result.setTimestampMillis(gmsProfile.getTimestampMillis()); - result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); - if (gmsProfile.hasActor()) { - result.setActor(gmsProfile.getActor().toString()); - } - result.setOperationType(OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); - result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); - if (gmsProfile.hasSourceType()) { - result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); - } - if (gmsProfile.hasPartitionSpec()) { - result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); - } - if (gmsProfile.hasCustomProperties()) { - result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); - } - if (gmsProfile.hasNumAffectedRows()) { - result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); - } - if (gmsProfile.hasAffectedDatasets()) { - result.setAffectedDatasets(gmsProfile.getAffectedDatasets().stream().map(Urn::toString).collect(Collectors.toList())); - } - - return result; + result.setTimestampMillis(gmsProfile.getTimestampMillis()); + result.setLastUpdatedTimestamp(gmsProfile.getLastUpdatedTimestamp()); + if (gmsProfile.hasActor()) { + result.setActor(gmsProfile.getActor().toString()); } + result.setOperationType( + OperationType.valueOf(OperationType.class, gmsProfile.getOperationType().toString())); + result.setCustomOperationType(gmsProfile.getCustomOperationType(GetMode.NULL)); + if (gmsProfile.hasSourceType()) { + result.setSourceType(OperationSourceType.valueOf(gmsProfile.getSourceType().toString())); + } + if (gmsProfile.hasPartitionSpec()) { + result.setPartition(gmsProfile.getPartitionSpec().getPartition(GetMode.NULL)); + } + if (gmsProfile.hasCustomProperties()) { + result.setCustomProperties(StringMapMapper.map(gmsProfile.getCustomProperties())); + } + if (gmsProfile.hasNumAffectedRows()) { + 
result.setNumAffectedRows(gmsProfile.getNumAffectedRows()); + } + if (gmsProfile.hasAffectedDatasets()) { + result.setAffectedDatasets( + gmsProfile.getAffectedDatasets().stream() + .map(Urn::toString) + .collect(Collectors.toList())); + } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java index 181bdc176fb941..ea15aefdad3b79 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -10,51 +12,49 @@ import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class OwnerMapper { - public static final OwnerMapper INSTANCE = new OwnerMapper(); + public static final OwnerMapper INSTANCE = new OwnerMapper(); - public static Owner map(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(owner, entityUrn); + public static Owner map( + @Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(owner, entityUrn); + } + + public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { + final Owner result = new Owner(); + // Deprecated + result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); + + if (owner.getTypeUrn() == null) { + OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); + owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); } - public Owner apply(@Nonnull final com.linkedin.common.Owner owner, @Nonnull final Urn entityUrn) { - final Owner result = new Owner(); - // Deprecated - result.setType(Enum.valueOf(OwnershipType.class, owner.getType().toString())); - - if (owner.getTypeUrn() == null) { - OwnershipType ownershipType = OwnershipType.valueOf(owner.getType().toString()); - owner.setTypeUrn(UrnUtils.getUrn(mapOwnershipTypeToEntity(ownershipType.name()))); - } - - if (owner.getTypeUrn() != null) { - OwnershipTypeEntity entity = new OwnershipTypeEntity(); - entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); - entity.setUrn(owner.getTypeUrn().toString()); - result.setOwnershipType(entity); - } - if (owner.getOwner().getEntityType().equals("corpuser")) { - CorpUser partialOwner = new CorpUser(); - partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } else { - CorpGroup partialOwner = new CorpGroup(); - partialOwner.setUrn(owner.getOwner().toString()); - result.setOwner(partialOwner); - } - if (owner.hasSource()) 
{ - result.setSource(OwnershipSourceMapper.map(owner.getSource())); - } - result.setAssociatedUrn(entityUrn.toString()); - return result; + if (owner.getTypeUrn() != null) { + OwnershipTypeEntity entity = new OwnershipTypeEntity(); + entity.setType(EntityType.CUSTOM_OWNERSHIP_TYPE); + entity.setUrn(owner.getTypeUrn().toString()); + result.setOwnershipType(entity); + } + if (owner.getOwner().getEntityType().equals("corpuser")) { + CorpUser partialOwner = new CorpUser(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } else { + CorpGroup partialOwner = new CorpGroup(); + partialOwner.setUrn(owner.getOwner().toString()); + result.setOwner(partialOwner); + } + if (owner.hasSource()) { + result.setSource(OwnershipSourceMapper.map(owner.getSource())); } + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java index d978abee5bdfc1..a38c16d02f1215 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnerUpdateMapper.java @@ -1,56 +1,56 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import com.linkedin.common.urn.UrnUtils; -import javax.annotation.Nonnull; - import com.linkedin.common.Owner; import com.linkedin.common.OwnershipSource; import com.linkedin.common.OwnershipSourceType; import com.linkedin.common.OwnershipType; +import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.generated.OwnerUpdate; -import com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.corpgroup.CorpGroupUtils; +import 
com.linkedin.datahub.graphql.types.corpuser.CorpUserUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import com.linkedin.common.urn.Urn; - import java.net.URISyntaxException; +import javax.annotation.Nonnull; public class OwnerUpdateMapper implements ModelMapper { - private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); - - public static Owner map(@Nonnull final OwnerUpdate input) { - return INSTANCE.apply(input); + private static final OwnerUpdateMapper INSTANCE = new OwnerUpdateMapper(); + + public static Owner map(@Nonnull final OwnerUpdate input) { + return INSTANCE.apply(input); + } + + @Override + public Owner apply(@Nonnull final OwnerUpdate input) { + final Owner owner = new Owner(); + try { + if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { + owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); + } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { + owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); + } + } catch (URISyntaxException e) { + e.printStackTrace(); } - - @Override - public Owner apply(@Nonnull final OwnerUpdate input) { - final Owner owner = new Owner(); - try { - if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpuser")) { - owner.setOwner(CorpUserUtils.getCorpUserUrn(input.getOwner())); - } else if (Urn.createFromString(input.getOwner()).getEntityType().equals("corpGroup")) { - owner.setOwner(CorpGroupUtils.getCorpGroupUrn(input.getOwner())); - } - } catch (URISyntaxException e) { - e.printStackTrace(); - } - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - } - // For backwards compatibility we have to always set the deprecated type. - // If the type exists we assume it's an old ownership type that we can map to. - // Else if it's a net new custom ownership type set old type to CUSTOM. 
- OwnershipType type = input.getType() != null ? OwnershipType.valueOf(input.getType().toString()) + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + } + // For backwards compatibility we have to always set the deprecated type. + // If the type exists we assume it's an old ownership type that we can map to. + // Else if it's a net new custom ownership type set old type to CUSTOM. + OwnershipType type = + input.getType() != null + ? OwnershipType.valueOf(input.getType().toString()) : OwnershipType.CUSTOM; - owner.setType(type); - - if (input.getOwnershipTypeUrn() != null) { - owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); - owner.setType(OwnershipType.CUSTOM); - } + owner.setType(type); - owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); - return owner; + if (input.getOwnershipTypeUrn() != null) { + owner.setTypeUrn(UrnUtils.getUrn(input.getOwnershipTypeUrn())); + owner.setType(OwnershipType.CUSTOM); } + + owner.setSource(new OwnershipSource().setType(OwnershipSourceType.SERVICE)); + return owner; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java index 6614cfb28a4784..31f637a047798e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipMapper.java @@ -2,30 +2,31 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Ownership; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class OwnershipMapper { - public static final OwnershipMapper INSTANCE = new OwnershipMapper(); + public static final OwnershipMapper INSTANCE = new OwnershipMapper(); - public static Ownership map(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(ownership, entityUrn); - } + public static Ownership map( + @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(ownership, entityUrn); + } - public Ownership apply(@Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { - final Ownership result = new Ownership(); - result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); - result.setOwners(ownership.getOwners() - .stream() - .map(owner -> OwnerMapper.map(owner, entityUrn)) - .collect(Collectors.toList())); - return result; - } + public Ownership apply( + @Nonnull final com.linkedin.common.Ownership ownership, @Nonnull final Urn entityUrn) { + final Ownership result = new Ownership(); + result.setLastModified(AuditStampMapper.map(ownership.getLastModified())); + result.setOwners( + ownership.getOwners().stream() + .map(owner -> OwnerMapper.map(owner, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java index abcc67c35f92af..75eaffb850a8b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipSourceMapper.java @@ -3,28 +3,28 @@ import com.linkedin.datahub.graphql.generated.OwnershipSource; import 
com.linkedin.datahub.graphql.generated.OwnershipSourceType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class OwnershipSourceMapper implements ModelMapper { +public class OwnershipSourceMapper + implements ModelMapper { - public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); + public static final OwnershipSourceMapper INSTANCE = new OwnershipSourceMapper(); - public static OwnershipSource map(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - return INSTANCE.apply(ownershipSource); - } + public static OwnershipSource map( + @Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + return INSTANCE.apply(ownershipSource); + } - @Override - public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { - final OwnershipSource result = new OwnershipSource(); - result.setUrl(ownershipSource.getUrl()); - result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); - return result; - } + @Override + public OwnershipSource apply(@Nonnull final com.linkedin.common.OwnershipSource ownershipSource) { + final OwnershipSource result = new OwnershipSource(); + result.setUrl(ownershipSource.getUrl()); + result.setType(Enum.valueOf(OwnershipSourceType.class, ownershipSource.getType().toString())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java index 1162c69d74938f..97afbc7ddf8556 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/OwnershipUpdateMapper.java @@ -1,7 +1,5 @@ package com.linkedin.datahub.graphql.types.common.mappers; -import java.util.stream.Collectors; - import com.linkedin.common.AuditStamp; import 
com.linkedin.common.OwnerArray; import com.linkedin.common.Ownership; @@ -9,31 +7,30 @@ import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.generated.OwnershipUpdate; import com.linkedin.datahub.graphql.types.mappers.InputModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; public class OwnershipUpdateMapper implements InputModelMapper { - private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); + private static final OwnershipUpdateMapper INSTANCE = new OwnershipUpdateMapper(); - public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - return INSTANCE.apply(input, actor); - } + public static Ownership map(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + return INSTANCE.apply(input, actor); + } - @Override - public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { - final Ownership ownership = new Ownership(); + @Override + public Ownership apply(@NonNull final OwnershipUpdate input, @NonNull final Urn actor) { + final Ownership ownership = new Ownership(); - ownership.setOwners(new OwnerArray(input.getOwners() - .stream() - .map(OwnerUpdateMapper::map) - .collect(Collectors.toList()))); + ownership.setOwners( + new OwnerArray( + input.getOwners().stream().map(OwnerUpdateMapper::map).collect(Collectors.toList()))); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - ownership.setLastModified(auditStamp); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); + ownership.setLastModified(auditStamp); - return ownership; - } + return ownership; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java index f3ac008734339e..e2d29d02974491 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SearchFlagsInputMapper.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.common.mappers; - import com.linkedin.datahub.graphql.generated.SearchFlags; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps GraphQL SearchFlags to Pegasus * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class SearchFlagsInputMapper implements ModelMapper { +public class SearchFlagsInputMapper + implements ModelMapper { public static final SearchFlagsInputMapper INSTANCE = new SearchFlagsInputMapper(); - public static com.linkedin.metadata.query.SearchFlags map(@Nonnull final SearchFlags searchFlags) { + public static com.linkedin.metadata.query.SearchFlags map( + @Nonnull final SearchFlags searchFlags) { return INSTANCE.apply(searchFlags); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java index 942171017cea4a..0758daf5df2e77 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SiblingsMapper.java @@ -5,13 +5,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class SiblingsMapper implements ModelMapper { +public class SiblingsMapper + implements ModelMapper { public static final SiblingsMapper INSTANCE = new SiblingsMapper(); @@ -23,10 +23,8 @@ public static SiblingProperties map(@Nonnull final com.linkedin.common.Siblings public SiblingProperties apply(@Nonnull final com.linkedin.common.Siblings siblings) { final SiblingProperties result = new SiblingProperties(); result.setIsPrimary(siblings.isPrimary()); - result.setSiblings(siblings.getSiblings() - .stream() - .map(UrnToEntityMapper::map) - .collect(Collectors.toList())); + result.setSiblings( + siblings.getSiblings().stream().map(UrnToEntityMapper::map).collect(Collectors.toList())); return result; } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java index 25d01d8de0e4c7..2d1efdffc496c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StatusMapper.java @@ -2,21 +2,20 @@ import com.linkedin.datahub.graphql.generated.Status; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; public class StatusMapper implements ModelMapper { - public static final StatusMapper INSTANCE = new StatusMapper(); + public static final StatusMapper INSTANCE = new StatusMapper(); - public static Status map(@Nonnull final com.linkedin.common.Status metadata) { - return INSTANCE.apply(metadata); - } + public static Status map(@Nonnull final com.linkedin.common.Status metadata) { + return INSTANCE.apply(metadata); + } - @Override - public Status apply(@Nonnull final com.linkedin.common.Status input) { - final Status result = new Status(); - 
result.setRemoved(input.isRemoved()); - return result; - } + @Override + public Status apply(@Nonnull final com.linkedin.common.Status input) { + final Status result = new Status(); + result.setRemoved(input.isRemoved()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java index 32c49a20104142..0e8d6822b7d091 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/StringMapMapper.java @@ -7,29 +7,28 @@ import java.util.Map; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class StringMapMapper implements ModelMapper, List> { - public static final StringMapMapper INSTANCE = new StringMapMapper(); + public static final StringMapMapper INSTANCE = new StringMapMapper(); - public static List map(@Nonnull final Map input) { - return INSTANCE.apply(input); - } + public static List map(@Nonnull final Map input) { + return INSTANCE.apply(input); + } - @Override - public List apply(@Nonnull final Map input) { - List results = new ArrayList<>(); - for (String key : input.keySet()) { - final StringMapEntry entry = new StringMapEntry(); - entry.setKey(key); - entry.setValue(input.get(key)); - results.add(entry); - } - return results; + @Override + public List apply(@Nonnull final Map input) { + List results = new ArrayList<>(); + for (String key : input.keySet()) { + final StringMapEntry entry = new StringMapEntry(); + entry.setKey(key); + entry.setValue(input.get(key)); + results.add(entry); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java index 9aa94eae629990..55294e4b46822c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/SubTypesMapper.java @@ -5,17 +5,20 @@ import java.util.ArrayList; import javax.annotation.Nonnull; -public class SubTypesMapper implements ModelMapper { +public class SubTypesMapper + implements ModelMapper { public static final SubTypesMapper INSTANCE = new SubTypesMapper(); - public static com.linkedin.datahub.graphql.generated.SubTypes map(@Nonnull final SubTypes metadata) { + public static com.linkedin.datahub.graphql.generated.SubTypes map( + @Nonnull final SubTypes metadata) { return 
INSTANCE.apply(metadata); } @Override public com.linkedin.datahub.graphql.generated.SubTypes apply(@Nonnull final SubTypes input) { - final com.linkedin.datahub.graphql.generated.SubTypes result = new com.linkedin.datahub.graphql.generated.SubTypes(); + final com.linkedin.datahub.graphql.generated.SubTypes result = + new com.linkedin.datahub.graphql.generated.SubTypes(); result.setTypeNames(new ArrayList<>(input.getTypeNames())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java index 8359f1ec86f34e..4fdf7edea07d9f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java @@ -4,22 +4,24 @@ import java.util.List; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class UpstreamLineagesMapper { public static final UpstreamLineagesMapper INSTANCE = new UpstreamLineagesMapper(); - public static List map(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + public static List map( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { return INSTANCE.apply(upstreamLineage); } - public List apply(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { - if (!upstreamLineage.hasFineGrainedLineages() || upstreamLineage.getFineGrainedLineages() == null) { + public List apply( + @Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + if (!upstreamLineage.hasFineGrainedLineages() + || upstreamLineage.getFineGrainedLineages() == null) { return new ArrayList<>(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java index 34bf56a396b620..4c452af1262012 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UrnToEntityMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.common.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Assertion; import com.linkedin.datahub.graphql.generated.Chart; @@ -35,10 +37,7 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class UrnToEntityMapper implements ModelMapper { +public class UrnToEntityMapper implements ModelMapper { public static final UrnToEntityMapper INSTANCE = new UrnToEntityMapper(); public static Entity 
map(@Nonnull final com.linkedin.common.urn.Urn urn) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java index 1e284efdb610f1..0b156f11e8834b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/MappingHelper.java @@ -7,14 +7,10 @@ import lombok.AllArgsConstructor; import lombok.Getter; - @AllArgsConstructor public class MappingHelper { - @Nonnull - private final EnvelopedAspectMap _aspectMap; - @Getter - @Nonnull - private final O result; + @Nonnull private final EnvelopedAspectMap _aspectMap; + @Getter @Nonnull private final O result; public void mapToResult(@Nonnull String aspectName, @Nonnull BiConsumer consumer) { if (_aspectMap.containsKey(aspectName)) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java index 7d1b374e1f9b6c..00e339a0320ef7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/RunInfo.java @@ -5,7 +5,6 @@ import lombok.Getter; import lombok.Setter; - @Data @Setter @Getter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java index d08300d648c323..46df032cbffbff 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/SystemMetadataUtils.java @@ -1,19 +1,17 @@ package com.linkedin.datahub.graphql.types.common.mappers.util; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; + import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.mxe.SystemMetadata; - import java.util.ArrayList; import java.util.List; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtils { - private SystemMetadataUtils() { - } + private SystemMetadataUtils() {} @Nullable public static Long getLastIngestedTime(@Nonnull EnvelopedAspectMap aspectMap) { @@ -28,7 +26,8 @@ public static String getLastIngestedRunId(@Nonnull EnvelopedAspectMap aspectMap) } /** - * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects present for the entity. + * Returns a sorted list of all of the most recent ingestion runs based on the most recent aspects + * present for the entity. 
*/ @Nonnull public static List getLastIngestionRuns(@Nonnull EnvelopedAspectMap aspectMap) { @@ -36,12 +35,16 @@ public static List getLastIngestionRuns(@Nonnull EnvelopedAspectMap asp for (String aspect : aspectMap.keySet()) { if (aspectMap.get(aspect).hasSystemMetadata()) { SystemMetadata systemMetadata = aspectMap.get(aspect).getSystemMetadata(); - if (systemMetadata.hasLastRunId() && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + if (systemMetadata.hasLastRunId() + && !systemMetadata.getLastRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getLastRunId(); RunInfo run = new RunInfo(runId, lastObserved); runs.add(run); - } else if (systemMetadata.hasRunId() && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) && systemMetadata.hasLastObserved()) { + } else if (systemMetadata.hasRunId() + && !systemMetadata.getRunId().equals(DEFAULT_RUN_ID) + && systemMetadata.hasLastObserved()) { // Handle the legacy case: Check original run ids. 
Long lastObserved = systemMetadata.getLastObserved(); String runId = systemMetadata.getRunId(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java index 108aa7ed5b0c9e..606cebba0880f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/util/UpdateMappingHelper.java @@ -6,7 +6,6 @@ import com.linkedin.mxe.MetadataChangeProposal; import lombok.AllArgsConstructor; - @AllArgsConstructor public class UpdateMappingHelper { private final String entityName; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java index 20cfe6ac461273..1200493666a592 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/ContainerType.java @@ -18,8 +18,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.net.URISyntaxException; @@ -33,31 +33,31 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; - -public class ContainerType implements SearchableEntityType, +public class ContainerType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final 
Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME, - Constants.SUB_TYPES_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.GLOSSARY_TERMS_ASPECT_NAME, - Constants.CONTAINER_ASPECT_NAME, - Constants.DOMAINS_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.DATA_PRODUCTS_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + Constants.CONTAINER_PROPERTIES_ASPECT_NAME, + Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME, + Constants.SUB_TYPES_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.GLOSSARY_TERMS_ASPECT_NAME, + Constants.CONTAINER_ASPECT_NAME, + Constants.DOMAINS_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.DATA_PRODUCTS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "container"; private final EntityClient _entityClient; - public ContainerType(final EntityClient entityClient) { + public ContainerType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -77,28 +77,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List containerUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List containerUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.CONTAINER_ENTITY_NAME, - new 
HashSet<>(containerUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.CONTAINER_ENTITY_NAME, + new HashSet<>(containerUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : containerUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(ContainerMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(ContainerMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Container", e); @@ -114,24 +116,36 @@ private Urn getUrn(final String urnStr) { } @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, 
context.getAuthentication()); + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java index b81259e78be3e7..07594c53c68312 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/container/mappers/ContainerMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.container.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -15,11 +17,11 @@ import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -33,9 +35,6 @@ import com.linkedin.metadata.Constants; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class ContainerMapper { @Nullable @@ -49,46 +48,61 @@ public static Container map(final EntityResponse entityResponse) { result.setUrn(entityUrn.toString()); result.setType(EntityType.CONTAINER); - final EnvelopedAspect envelopedPlatformInstance = aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); + final EnvelopedAspect envelopedPlatformInstance = + aspects.get(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME); if (envelopedPlatformInstance != null) { final DataMap data = envelopedPlatformInstance.getValue().data(); result.setPlatform(mapPlatform(new DataPlatformInstance(data))); - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); } else { final DataPlatform unknownPlatform = new DataPlatform(); unknownPlatform.setUrn(UNKNOWN_DATA_PLATFORM); result.setPlatform(unknownPlatform); } - final EnvelopedAspect envelopedContainerProperties = aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedContainerProperties = + aspects.get(Constants.CONTAINER_PROPERTIES_ASPECT_NAME); if (envelopedContainerProperties != null) { - result.setProperties(mapContainerProperties(new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); + result.setProperties( + mapContainerProperties( + new ContainerProperties(envelopedContainerProperties.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedEditableContainerProperties = aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedEditableContainerProperties = + 
aspects.get(Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME); if (envelopedEditableContainerProperties != null) { - result.setEditableProperties(mapContainerEditableProperties(new EditableContainerProperties(envelopedEditableContainerProperties.getValue().data()))); + result.setEditableProperties( + mapContainerEditableProperties( + new EditableContainerProperties( + envelopedEditableContainerProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } final EnvelopedAspect envelopedTags = aspects.get(Constants.GLOBAL_TAGS_ASPECT_NAME); if (envelopedTags != null) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(envelopedTags.getValue().data()), entityUrn); result.setTags(globalTags); } final EnvelopedAspect envelopedTerms = aspects.get(Constants.GLOSSARY_TERMS_ASPECT_NAME); if (envelopedTerms != null) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(envelopedTerms.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + 
result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } final EnvelopedAspect statusAspect = aspects.get(Constants.STATUS_ASPECT_NAME); @@ -103,12 +117,13 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedContainer = aspects.get(Constants.CONTAINER_ASPECT_NAME); if (envelopedContainer != null) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(envelopedContainer.getValue().data()); - result.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(envelopedContainer.getValue().data()); + result.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } final EnvelopedAspect envelopedDomains = aspects.get(Constants.DOMAINS_ASPECT_NAME); @@ -120,21 +135,25 @@ public static Container map(final EntityResponse entityResponse) { final EnvelopedAspect envelopedDeprecation = aspects.get(Constants.DEPRECATION_ASPECT_NAME); if (envelopedDeprecation != null) { - result.setDeprecation(DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); + result.setDeprecation( + DeprecationMapper.map(new Deprecation(envelopedDeprecation.getValue().data()))); } return result; } - private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties(final ContainerProperties gmsProperties, Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = new com.linkedin.datahub.graphql.generated.ContainerProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerProperties mapContainerProperties( + final ContainerProperties gmsProperties, Urn entityUrn) { 
+ final com.linkedin.datahub.graphql.generated.ContainerProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.ContainerProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { propertiesResult.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - propertiesResult.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + propertiesResult.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); } if (gmsProperties.hasQualifiedName()) { propertiesResult.setQualifiedName(gmsProperties.getQualifiedName().toString()); @@ -143,10 +162,11 @@ private static com.linkedin.datahub.graphql.generated.ContainerProperties mapCon return propertiesResult; } - private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties mapContainerEditableProperties( - final EditableContainerProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.ContainerEditableProperties editableContainerProperties = - new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); + private static com.linkedin.datahub.graphql.generated.ContainerEditableProperties + mapContainerEditableProperties(final EditableContainerProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.ContainerEditableProperties + editableContainerProperties = + new com.linkedin.datahub.graphql.generated.ContainerEditableProperties(); editableContainerProperties.setDescription(gmsProperties.getDescription()); return editableContainerProperties; } @@ -158,5 +178,5 @@ private static DataPlatform mapPlatform(final DataPlatformInstance platformInsta return dummyPlatform; } - private ContainerMapper() { } + private ContainerMapper() {} } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java index 285a119be0d43c..371cf6b280c20e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpgroup; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -7,8 +12,6 @@ import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.CorpGroup; @@ -27,8 +30,8 @@ import com.linkedin.identity.CorpGroupEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -42,155 +45,193 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static 
com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - -public class CorpGroupType implements SearchableEntityType, MutableType { - - private final EntityClient _entityClient; - - public CorpGroupType(final EntityClient entityClient) { - _entityClient = entityClient; +public class CorpGroupType + implements SearchableEntityType, + MutableType { + + private final EntityClient _entityClient; + + public CorpGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return CorpGroup.class; + } + + public Class inputClass() { + return CorpGroupUpdateInput.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_GROUP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + try { + final List corpGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map corpGroupMap = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + new HashSet<>(corpGroupUrns), + null, + context.getAuthentication()); + + final List results = new ArrayList<>(); + for (Urn urn : corpGroupUrns) { + results.add(corpGroupMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpGroup -> + gmsCorpGroup == null + ? 
null + : DataFetcherResult.newResult() + .data(CorpGroupMapper.map(gmsCorpGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load CorpGroup", e); } - - @Override - public Class objectClass() { - return CorpGroup.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpGroup", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpGroup", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public CorpGroup update( + @Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Urn groupUrn = Urn.createFromString(urn); + Map gmsResponse = + _entityClient.batchGetV2( + CORP_GROUP_ENTITY_NAME, + ImmutableSet.of(groupUrn), + ImmutableSet.of(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), + context.getAuthentication()); + + CorpGroupEditableInfo existingCorpGroupEditableInfo = null; + if (gmsResponse.containsKey(groupUrn) + && gmsResponse + .get(groupUrn) + .getAspects() + .containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { + existingCorpGroupEditableInfo = + new CorpGroupEditableInfo( + gmsResponse + .get(groupUrn) + .getAspects() + .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME) + .getValue() + 
.data()); + } + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, + mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); } - - public Class inputClass() { - return CorpGroupUpdateInput.class; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpGroupUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final CorpGroupUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getDescription() != null) { + // Requires the Update Docs privilege. + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { + // Requires the Update Contact info privilege. 
+ specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); } - @Override - public EntityType type() { - return EntityType.CORP_GROUP; - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public List> batchLoad(final List urns, final QueryContext context) { - try { - final List corpGroupUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map corpGroupMap = _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, - new HashSet<>(corpGroupUrns), null, context.getAuthentication()); - - final List results = new ArrayList<>(); - for (Urn urn : corpGroupUrns) { - results.add(corpGroupMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpGroup -> gmsCorpGroup == null ? null - : DataFetcherResult.newResult().data(CorpGroupMapper.map(gmsCorpGroup)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load CorpGroup", e); - } - } + private RecordTemplate mapCorpGroupEditableInfo( + CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { + CorpGroupEditableInfo result = existing != null ? 
existing : new CorpGroupEditableInfo(); - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult - searchResult = _entityClient.search("corpGroup", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (input.getDescription() != null) { + result.setDescription(input.getDescription()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpGroup", query, filters, limit, - context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - @Override - public CorpGroup update(@Nonnull String urn, @Nonnull CorpGroupUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Urn groupUrn = Urn.createFromString(urn); - Map gmsResponse = - _entityClient.batchGetV2(CORP_GROUP_ENTITY_NAME, ImmutableSet.of(groupUrn), ImmutableSet.of( - CORP_GROUP_EDITABLE_INFO_ASPECT_NAME), - context.getAuthentication()); - - CorpGroupEditableInfo existingCorpGroupEditableInfo = null; - if (gmsResponse.containsKey(groupUrn) && gmsResponse.get(groupUrn).getAspects().containsKey(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME)) { - existingCorpGroupEditableInfo = new CorpGroupEditableInfo(gmsResponse.get(groupUrn).getAspects() - .get(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME).getValue().data()); - } - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - 
CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, mapCorpGroupEditableInfo(input, existingCorpGroupEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorizedToUpdate(String urn, CorpGroupUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpGroupUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getDescription() != null) { - // Requires the Update Docs privilege. - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } else if (updateInput.getSlack() != null || updateInput.getEmail() != null) { - // Requires the Update Contact info privilege. - specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } - - private RecordTemplate mapCorpGroupEditableInfo(CorpGroupUpdateInput input, @Nullable CorpGroupEditableInfo existing) { - CorpGroupEditableInfo result = existing != null ? existing : new CorpGroupEditableInfo(); - - if (input.getDescription() != null) { - result.setDescription(input.getDescription()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - return result; + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java index c1cd33b0077f63..318506d9d61fae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/CorpGroupUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpgroup; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpGroupUrn; +import java.net.URISyntaxException; public class CorpGroupUtils { - private CorpGroupUtils() { } + private CorpGroupUtils() {} - public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpGroupUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); - } + public static CorpGroupUrn getCorpGroupUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpGroupUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + 
String.format("Failed to create CorpGroupUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java index f476794bc545ed..a6e14535cf0b7f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupEditablePropertiesMapper.java @@ -3,28 +3,32 @@ import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.CorpGroupEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpGroupEditablePropertiesMapper implements ModelMapper { +public class CorpGroupEditablePropertiesMapper + implements ModelMapper< + com.linkedin.identity.CorpGroupEditableInfo, CorpGroupEditableProperties> { - public static final CorpGroupEditablePropertiesMapper INSTANCE = new CorpGroupEditablePropertiesMapper(); + public static final CorpGroupEditablePropertiesMapper INSTANCE = + new CorpGroupEditablePropertiesMapper(); - public static CorpGroupEditableProperties map(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public static CorpGroupEditableProperties map( + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { return INSTANCE.apply(corpGroupEditableInfo); } @Override - public CorpGroupEditableProperties apply(@Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { + public CorpGroupEditableProperties apply( + @Nonnull final com.linkedin.identity.CorpGroupEditableInfo corpGroupEditableInfo) { final CorpGroupEditableProperties result = new CorpGroupEditableProperties(); result.setDescription(corpGroupEditableInfo.getDescription(GetMode.DEFAULT)); result.setSlack(corpGroupEditableInfo.getSlack(GetMode.DEFAULT)); result.setEmail(corpGroupEditableInfo.getEmail(GetMode.DEFAULT)); return result; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java index 3d2d4aea2b0015..04d0cc8ce94e63 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupInfoMapper.java @@ -1,48 +1,58 @@ package 
com.linkedin.datahub.graphql.types.corpgroup.mappers; -import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpGroupInfo; +import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpGroupInfoMapper implements ModelMapper { +public class CorpGroupInfoMapper + implements ModelMapper { - public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); + public static final CorpGroupInfoMapper INSTANCE = new CorpGroupInfoMapper(); - public static CorpGroupInfo map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { - return INSTANCE.apply(corpGroupInfo); - } + public static CorpGroupInfo map( + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + return INSTANCE.apply(corpGroupInfo); + } - @Override - public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { - final CorpGroupInfo result = new CorpGroupInfo(); - result.setEmail(info.getEmail()); - result.setDescription(info.getDescription()); - result.setDisplayName(info.getDisplayName()); - if (info.hasAdmins()) { - result.setAdmins(info.getAdmins().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasMembers()) { - result.setMembers(info.getMembers().stream().map(urn -> { - final CorpUser corpUser = new CorpUser(); - corpUser.setUrn(urn.toString()); - return corpUser; - }).collect(Collectors.toList())); - } - if (info.hasGroups()) { - result.setGroups(info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); - } - return result; + @Override + public CorpGroupInfo apply(@Nonnull final com.linkedin.identity.CorpGroupInfo info) { + final CorpGroupInfo result = new CorpGroupInfo(); + result.setEmail(info.getEmail()); + result.setDescription(info.getDescription()); + result.setDisplayName(info.getDisplayName()); + if (info.hasAdmins()) { + result.setAdmins( + info.getAdmins().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + 
.collect(Collectors.toList())); + } + if (info.hasMembers()) { + result.setMembers( + info.getMembers().stream() + .map( + urn -> { + final CorpUser corpUser = new CorpUser(); + corpUser.setUrn(urn.toString()); + return corpUser; + }) + .collect(Collectors.toList())); + } + if (info.hasGroups()) { + result.setGroups( + info.getGroups().stream().map(urn -> (urn.toString())).collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java index 0fb1b66c644d78..52e200d19923a9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.corpgroup.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Origin; import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; @@ -16,78 +18,79 @@ import com.linkedin.metadata.key.CorpGroupKey; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class CorpGroupMapper implements ModelMapper { - public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); + public static final CorpGroupMapper INSTANCE = new CorpGroupMapper(); - public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static CorpGroup map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { - final CorpGroup result = new CorpGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public CorpGroup apply(@Nonnull final EntityResponse entityResponse) { + final CorpGroup result = new CorpGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); - mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); - mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); - if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { - mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); - } else { - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - result.setOrigin(mappedGroupOrigin); - } - return mappingHelper.getResult(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_GROUP); + EnvelopedAspectMap 
aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_GROUP_KEY_ASPECT_NAME, this::mapCorpGroupKey); + mappingHelper.mapToResult(CORP_GROUP_INFO_ASPECT_NAME, this::mapCorpGroupInfo); + mappingHelper.mapToResult(CORP_GROUP_EDITABLE_INFO_ASPECT_NAME, this::mapCorpGroupEditableInfo); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, (entity, dataMap) -> this.mapOwnership(entity, dataMap, entityUrn)); + if (aspectMap.containsKey(ORIGIN_ASPECT_NAME)) { + mappingHelper.mapToResult(ORIGIN_ASPECT_NAME, this::mapEntityOriginType); + } else { + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + result.setOrigin(mappedGroupOrigin); } + return mappingHelper.getResult(); + } - private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); - corpGroup.setName(corpGroupKey.getName()); - } + private void mapCorpGroupKey(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupKey corpGroupKey = new CorpGroupKey(dataMap); + corpGroup.setName(corpGroupKey.getName()); + } - private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); - corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); - corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); - } + private void mapCorpGroupInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + CorpGroupInfo corpGroupInfo = new CorpGroupInfo(dataMap); + corpGroup.setProperties(CorpGroupPropertiesMapper.map(corpGroupInfo)); + corpGroup.setInfo(CorpGroupInfoMapper.map(corpGroupInfo)); + } - private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - 
corpGroup.setEditableProperties(CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); - } + private void mapCorpGroupEditableInfo(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + corpGroup.setEditableProperties( + CorpGroupEditablePropertiesMapper.map(new CorpGroupEditableInfo(dataMap))); + } - private void mapOwnership(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); - } + private void mapOwnership( + @Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + corpGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)); + } - private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { - Origin groupOrigin = new Origin(dataMap); - com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = - new com.linkedin.datahub.graphql.generated.Origin(); - if (groupOrigin.hasType()) { - mappedGroupOrigin.setType( - com.linkedin.datahub.graphql.generated.OriginType.valueOf(groupOrigin.getType().toString())); - } else { - mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); - } - if (groupOrigin.hasExternalType()) { - mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); - } - corpGroup.setOrigin(mappedGroupOrigin); + private void mapEntityOriginType(@Nonnull CorpGroup corpGroup, @Nonnull DataMap dataMap) { + Origin groupOrigin = new Origin(dataMap); + com.linkedin.datahub.graphql.generated.Origin mappedGroupOrigin = + new com.linkedin.datahub.graphql.generated.Origin(); + if (groupOrigin.hasType()) { + mappedGroupOrigin.setType( + com.linkedin.datahub.graphql.generated.OriginType.valueOf( + groupOrigin.getType().toString())); + } else { + mappedGroupOrigin.setType(com.linkedin.datahub.graphql.generated.OriginType.UNKNOWN); + } + if (groupOrigin.hasExternalType()) { + 
mappedGroupOrigin.setExternalType(groupOrigin.getExternalType()); } + corpGroup.setOrigin(mappedGroupOrigin); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java index 266d8be67cb061..29d0482863971c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpgroup/mappers/CorpGroupPropertiesMapper.java @@ -3,19 +3,20 @@ import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.CorpGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpGroupPropertiesMapper implements ModelMapper { +public class CorpGroupPropertiesMapper + implements ModelMapper { public static final CorpGroupPropertiesMapper INSTANCE = new CorpGroupPropertiesMapper(); - public static CorpGroupProperties map(@Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { + public static CorpGroupProperties map( + @Nonnull final com.linkedin.identity.CorpGroupInfo corpGroupInfo) { return INSTANCE.apply(corpGroupInfo); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java index db2b49c790f57e..5749eef970fce8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.corpuser; +import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.url.Url; import com.linkedin.common.urn.Urn; @@ -8,8 +13,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.featureflags.FeatureFlags; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; @@ -29,8 +32,8 @@ import 
com.linkedin.identity.CorpUserEditableInfo; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.execution.DataFetcherResult; @@ -45,176 +48,206 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class CorpUserType implements SearchableEntityType, MutableType { +public class CorpUserType + implements SearchableEntityType, MutableType { + + private final EntityClient _entityClient; + private final FeatureFlags _featureFlags; + + public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { + _entityClient = entityClient; + _featureFlags = featureFlags; + } + + @Override + public Class objectClass() { + return CorpUser.class; + } + + @Override + public EntityType type() { + return EntityType.CORP_USER; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + try { + final List corpUserUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map corpUserMap = + _entityClient.batchGetV2( + CORP_USER_ENTITY_NAME, + new HashSet<>(corpUserUrns), + null, + context.getAuthentication()); + + final List results = new ArrayList<>(); + for (Urn urn : corpUserUrns) { + results.add(corpUserMap.getOrDefault(urn, null)); + } + return results.stream() + .map( + gmsCorpUser -> + gmsCorpUser == null + ? 
null + : DataFetcherResult.newResult() + .data(CorpUserMapper.map(gmsCorpUser, _featureFlags)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); + } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + "corpuser", + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + public Class inputClass() { + return CorpUserUpdateInput.class; + } + + @Override + public CorpUser update( + @Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorizedToUpdate(urn, input, context)) { + // Get existing editable info to merge with + Optional existingCorpUserEditableInfo = + _entityClient.getVersionedAspect( + urn, + CORP_USER_EDITABLE_INFO_NAME, + 0L, + CorpUserEditableInfo.class, + context.getAuthentication()); + + // Create the MCP + final MetadataChangeProposal proposal = + buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(urn), + CORP_USER_EDITABLE_INFO_NAME, + mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); + _entityClient.ingestProposal(proposal, context.getAuthentication(), false); + + return load(urn, context).getData(); + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + private boolean isAuthorizedToUpdate( + String urn, CorpUserUpdateInput input, QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); + + // Either the updating actor is the user, or the actor has privileges to update the user + // information. + return context.getActorUrn().equals(urn) + || AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getSlack() != null + || updateInput.getEmail() != null + || updateInput.getPhone() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); + } else if (updateInput.getAboutMe() != null + || updateInput.getDisplayName() != null + || updateInput.getPictureLink() != null + || updateInput.getTeams() != null + || updateInput.getTitle() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); + } - private final EntityClient _entityClient; - private final FeatureFlags _featureFlags; + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - public CorpUserType(final EntityClient entityClient, final FeatureFlags featureFlags) { - _entityClient = entityClient; - _featureFlags = featureFlags; - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } - @Override - public Class objectClass() { - return CorpUser.class; + private RecordTemplate mapCorpUserEditableInfo( + CorpUserUpdateInput input, Optional existing) { + CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public EntityType type() { - return EntityType.CORP_USER; + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; + if (input.getPictureLink() != null) { + result.setPictureLink(new Url(input.getPictureLink())); } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - try { - final List corpUserUrns = urns - .stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map corpUserMap = _entityClient - .batchGetV2(CORP_USER_ENTITY_NAME, new HashSet<>(corpUserUrns), null, - context.getAuthentication()); - - final List results = new ArrayList<>(); - for (Urn urn : corpUserUrns) { - results.add(corpUserMap.getOrDefault(urn, null)); - } - return results.stream() - .map(gmsCorpUser -> gmsCorpUser == null ? 
null - : DataFetcherResult.newResult().data(CorpUserMapper.map(gmsCorpUser, _featureFlags)).build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (input.getAboutMe() != null) { + result.setAboutMe(input.getAboutMe()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search("corpuser", query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (input.getSkills() != null) { + result.setSkills(new StringArray(input.getSkills())); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("corpuser", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (input.getTeams() != null) { + result.setTeams(new StringArray(input.getTeams())); } - - public Class inputClass() { - return CorpUserUpdateInput.class; + if (input.getTitle() != null) { + result.setTitle(input.getTitle()); } - - @Override - public CorpUser update(@Nonnull String urn, @Nonnull CorpUserUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorizedToUpdate(urn, input, context)) { - // Get existing editable info to merge with - Optional existingCorpUserEditableInfo = - _entityClient.getVersionedAspect(urn, CORP_USER_EDITABLE_INFO_NAME, 0L, CorpUserEditableInfo.class, - context.getAuthentication()); - - // Create the MCP - final MetadataChangeProposal proposal = buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(urn), - CORP_USER_EDITABLE_INFO_NAME, 
mapCorpUserEditableInfo(input, existingCorpUserEditableInfo)); - _entityClient.ingestProposal(proposal, context.getAuthentication(), false); - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + if (input.getPhone() != null) { + result.setPhone(input.getPhone()); } - - private boolean isAuthorizedToUpdate(String urn, CorpUserUpdateInput input, QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(input); - - // Either the updating actor is the user, or the actor has privileges to update the user information. - return context.getActorUrn().equals(urn) || AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.CORP_GROUP_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + if (input.getSlack() != null) { + result.setSlack(input.getSlack()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final CorpUserUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getSlack() != null - || updateInput.getEmail() != null - || updateInput.getPhone() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_CONTACT_INFO_PRIVILEGE.getType()); - } else if (updateInput.getAboutMe() != null - || updateInput.getDisplayName() != null - || updateInput.getPictureLink() != null - || updateInput.getTeams() != null - || updateInput.getTitle() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_USER_PROFILE_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either 
have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (input.getEmail() != null) { + result.setEmail(input.getEmail()); } - private RecordTemplate mapCorpUserEditableInfo(CorpUserUpdateInput input, Optional existing) { - CorpUserEditableInfo result = existing.orElseGet(() -> new CorpUserEditableInfo()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getPictureLink() != null) { - result.setPictureLink(new Url(input.getPictureLink())); - } - if (input.getAboutMe() != null) { - result.setAboutMe(input.getAboutMe()); - } - if (input.getSkills() != null) { - result.setSkills(new StringArray(input.getSkills())); - } - if (input.getTeams() != null) { - result.setTeams(new StringArray(input.getTeams())); - } - if (input.getTitle() != null) { - result.setTitle(input.getTitle()); - } - if (input.getPhone() != null) { - result.setPhone(input.getPhone()); - } - if (input.getSlack() != null) { - result.setSlack(input.getSlack()); - } - if (input.getEmail() != null) { - result.setEmail(input.getEmail()); - } - - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java index 0b5b40c3117e05..9cf8da69281a96 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/CorpUserUtils.java @@ -1,21 +1,21 @@ package com.linkedin.datahub.graphql.types.corpuser; -import java.net.URISyntaxException; - import com.linkedin.common.urn.CorpuserUrn; +import java.net.URISyntaxException; 
public class CorpUserUtils { - private CorpUserUtils() { } + private CorpUserUtils() {} - public static CorpuserUrn getCorpUserUrn(final String urnStr) { - if (urnStr == null) { - return null; - } - try { - return CorpuserUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to create CorpUserUrn from string %s", urnStr), e); - } + public static CorpuserUrn getCorpUserUrn(final String urnStr) { + if (urnStr == null) { + return null; + } + try { + return CorpuserUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to create CorpUserUrn from string %s", urnStr), e); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java index 2a9f0efd69bcc8..3ee353293393e7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserEditableInfoMapper.java @@ -2,36 +2,38 @@ import com.linkedin.datahub.graphql.generated.CorpUserEditableProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpUserEditableInfoMapper implements ModelMapper { +public class CorpUserEditableInfoMapper + implements ModelMapper { - public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); + public static final CorpUserEditableInfoMapper INSTANCE = new CorpUserEditableInfoMapper(); - public static CorpUserEditableProperties map(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - return INSTANCE.apply(info); - } + public static CorpUserEditableProperties map( + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + return INSTANCE.apply(info); + } - @Override - public CorpUserEditableProperties apply(@Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { - final CorpUserEditableProperties result = new CorpUserEditableProperties(); - result.setDisplayName(info.getDisplayName()); - result.setTitle(info.getTitle()); - result.setAboutMe(info.getAboutMe()); - result.setSkills(info.getSkills()); - result.setTeams(info.getTeams()); - result.setEmail(info.getEmail()); - result.setPhone(info.getPhone()); - result.setSlack(info.getSlack()); - if (info.hasPictureLink()) { - result.setPictureLink(info.getPictureLink().toString()); - } - return result; + @Override + public CorpUserEditableProperties apply( + @Nonnull final com.linkedin.identity.CorpUserEditableInfo info) { + final CorpUserEditableProperties result = new CorpUserEditableProperties(); + result.setDisplayName(info.getDisplayName()); + result.setTitle(info.getTitle()); + result.setAboutMe(info.getAboutMe()); + result.setSkills(info.getSkills()); + result.setTeams(info.getTeams()); + result.setEmail(info.getEmail()); + result.setPhone(info.getPhone()); + result.setSlack(info.getSlack()); + if (info.hasPictureLink()) { + result.setPictureLink(info.getPictureLink().toString()); } + return result; + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java index 96f60c08cd7c28..9044f4d510bcf9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserInfoMapper.java @@ -3,38 +3,38 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserInfo; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ -public class CorpUserInfoMapper implements ModelMapper { +public class CorpUserInfoMapper + implements ModelMapper { - public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); + public static final CorpUserInfoMapper INSTANCE = new CorpUserInfoMapper(); - public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { - return INSTANCE.apply(corpUserInfo); - } + public static CorpUserInfo map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + return INSTANCE.apply(corpUserInfo); + } - @Override - public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { - final CorpUserInfo result = new CorpUserInfo(); - result.setActive(info.isActive()); - result.setCountryCode(info.getCountryCode()); - result.setDepartmentId(info.getDepartmentId()); - result.setDepartmentName(info.getDepartmentName()); - result.setEmail(info.getEmail()); - result.setDisplayName(info.getDisplayName()); - result.setFirstName(info.getFirstName()); - result.setLastName(info.getLastName()); - result.setFullName(info.getFullName()); - result.setTitle(info.getTitle()); - if (info.hasManagerUrn()) { - result.setManager(new CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); - } - return result; + @Override + public CorpUserInfo apply(@Nonnull final com.linkedin.identity.CorpUserInfo info) { + final CorpUserInfo result = new CorpUserInfo(); + result.setActive(info.isActive()); + result.setCountryCode(info.getCountryCode()); + result.setDepartmentId(info.getDepartmentId()); + result.setDepartmentName(info.getDepartmentName()); + result.setEmail(info.getEmail()); + result.setDisplayName(info.getDisplayName()); + result.setFirstName(info.getFirstName()); + result.setLastName(info.getLastName()); + result.setFullName(info.getFullName()); + result.setTitle(info.getTitle()); + if (info.hasManagerUrn()) { + result.setManager(new 
CorpUser.Builder().setUrn(info.getManagerUrn().toString()).build()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java index adcfb91c9cdf29..98783131a2d521 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.corpuser.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.GlobalTags; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -26,120 +28,134 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class CorpUserMapper { - public static final CorpUserMapper INSTANCE = new CorpUserMapper(); - - public static CorpUser map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse, null); - } - - public static CorpUser map(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - return INSTANCE.apply(entityResponse, featureFlags); - } - - public CorpUser apply(@Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { - final CorpUser result = new CorpUser(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.CORP_USER); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); - mappingHelper.mapToResult(CORP_USER_INFO_ASPECT_NAME, (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); - mappingHelper.mapToResult(CORP_USER_EDITABLE_INFO_ASPECT_NAME, (corpUser, dataMap) -> - corpUser.setEditableProperties(CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (corpUser, dataMap) -> + public static final CorpUserMapper INSTANCE = new CorpUserMapper(); + + public static CorpUser map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse, null); + } + + public static CorpUser map( + @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + return INSTANCE.apply(entityResponse, featureFlags); + } + + public CorpUser apply( + @Nonnull final EntityResponse entityResponse, @Nullable final FeatureFlags featureFlags) { + final CorpUser result = new CorpUser(); + Urn entityUrn = entityResponse.getUrn(); + + 
result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.CORP_USER); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(CORP_USER_KEY_ASPECT_NAME, this::mapCorpUserKey); + mappingHelper.mapToResult( + CORP_USER_INFO_ASPECT_NAME, + (corpUser, dataMap) -> this.mapCorpUserInfo(corpUser, dataMap, entityUrn)); + mappingHelper.mapToResult( + CORP_USER_EDITABLE_INFO_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setEditableProperties( + CorpUserEditableInfoMapper.map(new CorpUserEditableInfo(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (corpUser, dataMap) -> corpUser.setGlobalTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(CORP_USER_STATUS_ASPECT_NAME, - (corpUser, dataMap) -> corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); - mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); - - mapCorpUserSettings(result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); - - return mappingHelper.getResult(); + mappingHelper.mapToResult( + CORP_USER_STATUS_ASPECT_NAME, + (corpUser, dataMap) -> + corpUser.setStatus(CorpUserStatusMapper.map(new CorpUserStatus(dataMap)))); + mappingHelper.mapToResult(CORP_USER_CREDENTIALS_ASPECT_NAME, this::mapIsNativeUser); + + mapCorpUserSettings( + result, aspectMap.getOrDefault(CORP_USER_SETTINGS_ASPECT_NAME, null), featureFlags); + + return mappingHelper.getResult(); + } + + private void mapCorpUserSettings( + @Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { + CorpUserSettings corpUserSettings = new CorpUserSettings(); + if (envelopedAspect != null) { + corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); } + com.linkedin.datahub.graphql.generated.CorpUserSettings result = + new 
com.linkedin.datahub.graphql.generated.CorpUserSettings(); - private void mapCorpUserSettings(@Nonnull CorpUser corpUser, EnvelopedAspect envelopedAspect, FeatureFlags featureFlags) { - CorpUserSettings corpUserSettings = new CorpUserSettings(); - if (envelopedAspect != null) { - corpUserSettings = new CorpUserSettings(envelopedAspect.getValue().data()); - } - com.linkedin.datahub.graphql.generated.CorpUserSettings result = - new com.linkedin.datahub.graphql.generated.CorpUserSettings(); - - // Map Appearance Settings -- Appearance settings always exist. - result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); + // Map Appearance Settings -- Appearance settings always exist. + result.setAppearance(mapCorpUserAppearanceSettings(corpUserSettings, featureFlags)); - // Map Views Settings. - if (corpUserSettings.hasViews()) { - result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); - } - - corpUser.setSettings(result); + // Map Views Settings. + if (corpUserSettings.hasViews()) { + result.setViews(mapCorpUserViewsSettings(corpUserSettings.getViews())); } - @Nonnull - private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( - @Nonnull final CorpUserSettings corpUserSettings, - @Nullable final FeatureFlags featureFlags - ) { - CorpUserAppearanceSettings appearanceResult = new CorpUserAppearanceSettings(); - if (featureFlags != null) { - appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); - } else { - appearanceResult.setShowSimplifiedHomepage(false); - } - - if (corpUserSettings.hasAppearance()) { - appearanceResult.setShowSimplifiedHomepage(corpUserSettings.getAppearance().isShowSimplifiedHomepage()); - } - return appearanceResult; + corpUser.setSettings(result); + } + + @Nonnull + private CorpUserAppearanceSettings mapCorpUserAppearanceSettings( + @Nonnull final CorpUserSettings corpUserSettings, @Nullable final FeatureFlags featureFlags) { + CorpUserAppearanceSettings 
appearanceResult = new CorpUserAppearanceSettings(); + if (featureFlags != null) { + appearanceResult.setShowSimplifiedHomepage(featureFlags.isShowSimplifiedHomepageByDefault()); + } else { + appearanceResult.setShowSimplifiedHomepage(false); } - @Nonnull - private CorpUserViewsSettings mapCorpUserViewsSettings(@Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { - CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); - - if (viewsSettings.hasDefaultView()) { - final DataHubView unresolvedView = new DataHubView(); - unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); - unresolvedView.setType(EntityType.DATAHUB_VIEW); - viewsResult.setDefaultView(unresolvedView); - } - - return viewsResult; - } - - private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserKey corpUserKey = new CorpUserKey(dataMap); - corpUser.setUsername(corpUserKey.getUsername()); + if (corpUserSettings.hasAppearance()) { + appearanceResult.setShowSimplifiedHomepage( + corpUserSettings.getAppearance().isShowSimplifiedHomepage()); } - - private void mapCorpUserInfo(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); - corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); - corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); - CorpUserProperties corpUserProperties = corpUser.getProperties(); - if (corpUserInfo.hasCustomProperties()) { - corpUserProperties.setCustomProperties(CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); - } - corpUser.setProperties(corpUserProperties); + return appearanceResult; + } + + @Nonnull + private CorpUserViewsSettings mapCorpUserViewsSettings( + @Nonnull final com.linkedin.identity.CorpUserViewsSettings viewsSettings) { + CorpUserViewsSettings viewsResult = new CorpUserViewsSettings(); + + if (viewsSettings.hasDefaultView()) { + final DataHubView unresolvedView 
= new DataHubView(); + unresolvedView.setUrn(viewsSettings.getDefaultView().toString()); + unresolvedView.setType(EntityType.DATAHUB_VIEW); + viewsResult.setDefaultView(unresolvedView); } - private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { - CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); - boolean isNativeUser = - corpUserCredentials != null && corpUserCredentials.hasSalt() && corpUserCredentials.hasHashedPassword(); - corpUser.setIsNativeUser(isNativeUser); + return viewsResult; + } + + private void mapCorpUserKey(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserKey corpUserKey = new CorpUserKey(dataMap); + corpUser.setUsername(corpUserKey.getUsername()); + } + + private void mapCorpUserInfo( + @Nonnull CorpUser corpUser, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + CorpUserInfo corpUserInfo = new CorpUserInfo(dataMap); + corpUser.setProperties(CorpUserPropertiesMapper.map(corpUserInfo)); + corpUser.setInfo(CorpUserInfoMapper.map(corpUserInfo)); + CorpUserProperties corpUserProperties = corpUser.getProperties(); + if (corpUserInfo.hasCustomProperties()) { + corpUserProperties.setCustomProperties( + CustomPropertiesMapper.map(corpUserInfo.getCustomProperties(), entityUrn)); } + corpUser.setProperties(corpUserProperties); + } + + private void mapIsNativeUser(@Nonnull CorpUser corpUser, @Nonnull DataMap dataMap) { + CorpUserCredentials corpUserCredentials = new CorpUserCredentials(dataMap); + boolean isNativeUser = + corpUserCredentials != null + && corpUserCredentials.hasSalt() + && corpUserCredentials.hasHashedPassword(); + corpUser.setIsNativeUser(isNativeUser); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java index c64406a74733bc..106e3de6612015 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserPropertiesMapper.java @@ -3,18 +3,16 @@ import com.linkedin.datahub.graphql.generated.CorpUser; import com.linkedin.datahub.graphql.generated.CorpUserProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ -public class CorpUserPropertiesMapper implements ModelMapper { +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ +public class CorpUserPropertiesMapper + implements ModelMapper { public static final CorpUserPropertiesMapper INSTANCE = new CorpUserPropertiesMapper(); - public static CorpUserProperties map(@Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { + public static CorpUserProperties map( + @Nonnull final com.linkedin.identity.CorpUserInfo corpUserInfo) { return INSTANCE.apply(corpUserInfo); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java index d0644fbfdacec0..dd9e465a2d4ea9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/corpuser/mappers/CorpUserStatusMapper.java @@ -2,14 +2,15 @@ import com.linkedin.datahub.graphql.generated.CorpUserStatus; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class CorpUserStatusMapper implements ModelMapper { +public class CorpUserStatusMapper + implements ModelMapper { public static final 
CorpUserStatusMapper INSTANCE = new CorpUserStatusMapper(); - public static CorpUserStatus map(@Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { + public static CorpUserStatus map( + @Nonnull final com.linkedin.identity.CorpUserStatus corpUserStatus) { return INSTANCE.apply(corpUserStatus); } @@ -18,4 +19,4 @@ public CorpUserStatus apply(@Nonnull final com.linkedin.identity.CorpUserStatus // Warning- if the backend provides an unexpected value this will fail. return CorpUserStatus.valueOf(status.getStatus()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 104c7c004cb664..d01f9b3945dc34 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dashboard; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ 
import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -55,191 +58,214 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DashboardType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DASHBOARD_KEY_ASPECT_NAME, + DASHBOARD_INFO_ASPECT_NAME, + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + INPUT_FIELDS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); -public class DashboardType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DASHBOARD_KEY_ASPECT_NAME, - DASHBOARD_INFO_ASPECT_NAME, - EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - INPUT_FIELDS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - EMBED_ASPECT_NAME, - 
DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("access", "tool"); - - private final EntityClient _entityClient; - - public DashboardType(final EntityClient entityClient) { - _entityClient = entityClient; - } + private final EntityClient _entityClient; - @Override - public Class inputClass() { - return DashboardUpdateInput.class; - } + public DashboardType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DASHBOARD; - } + @Override + public Class inputClass() { + return DashboardUpdateInput.class; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public EntityType type() { + return EntityType.DASHBOARD; + } - @Override - public Class objectClass() { - return Dashboard.class; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dashboardMap = - _entityClient.batchGetV2( - Constants.DASHBOARD_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dashboardMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDashboard -> gmsDashboard == null ? 
null : DataFetcherResult.newResult() - .data(DashboardMapper.map(gmsDashboard)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Dashboards", e); - } - } + @Override + public Class objectClass() { + return Dashboard.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dashboard", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dashboardMap = + _entityClient.batchGetV2( + Constants.DASHBOARD_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dashboardMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDashboard -> + gmsDashboard == null + ? 
null + : DataFetcherResult.newResult() + .data(DashboardMapper.map(gmsDashboard)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Dashboards", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dashboard", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dashboard", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { - try { - 
return DashboardUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); - } - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dashboard", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - @Override - public Dashboard update(@Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DashboardUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(getDashboardUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + private com.linkedin.common.urn.DashboardUrn getDashboardUrn(String urnStr) { + try { + return DashboardUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dashboard with urn %s, invalid urn", urnStr)); } + } + + @Override + public Dashboard update( + @Nonnull String urn, @Nonnull DashboardUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DashboardUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DashboardUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DASHBOARD_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges( + final DashboardUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DashboardUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java index 432624ac4699f4..704d2ae308c1ae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -26,13 +28,13 @@ import com.linkedin.datahub.graphql.types.chart.mappers.InputFieldsMapper; import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import 
com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; @@ -49,161 +51,202 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DashboardMapper implements ModelMapper { - public static final DashboardMapper INSTANCE = new DashboardMapper(); - - public static Dashboard map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Dashboard apply(@Nonnull final EntityResponse entityResponse) { - final Dashboard result = new Dashboard(); - Urn entityUrn = entityResponse.getUrn(); - - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DASHBOARD); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); - mappingHelper.mapToResult(DASHBOARD_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, 
this::mapEditableDashboardProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dashboard, dataMap) -> + public static final DashboardMapper INSTANCE = new DashboardMapper(); + + public static Dashboard map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public Dashboard apply(@Nonnull final EntityResponse entityResponse) { + final Dashboard result = new Dashboard(); + Urn entityUrn = entityResponse.getUrn(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DASHBOARD); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DASHBOARD_KEY_ASPECT_NAME, this::mapDashboardKey); + mappingHelper.mapToResult( + DASHBOARD_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapDashboardInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME, this::mapEditableDashboardProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - 
mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dashboard, dataMap) -> + dashboard.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(INPUT_FIELDS_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + INPUT_FIELDS_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setInputFields(InputFieldsMapper.map(new InputFields(dataMap), entityUrn))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, 
(dashboard, dataMap) -> - dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dashboard, dataMap) -> + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final DashboardKey gmsKey = new DashboardKey(dataMap); - dashboard.setDashboardId(gmsKey.getDashboardId()); - dashboard.setTool(gmsKey.getDashboardTool()); - dashboard.setPlatform(DataPlatform.builder() + return mappingHelper.getResult(); + } + + private void mapDashboardKey(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final DashboardKey gmsKey = new DashboardKey(dataMap); + dashboard.setDashboardId(gmsKey.getDashboardId()); + dashboard.setTool(gmsKey.getDashboardTool()); + dashboard.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getDashboardTool()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getDashboardTool()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapDashboardInfo( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = + new com.linkedin.dashboard.DashboardInfo(dataMap); + dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); + 
dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link + * DashboardInfo} + */ + private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardInfo result = new DashboardInfo(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + result.setCharts( + info.getCharts().stream() + .map( + urn -> { + final Chart chart = new Chart(); + chart.setUrn(urn.toString()); + return chart; + }) + .collect(Collectors.toList())); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapDashboardInfo(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.dashboard.DashboardInfo gmsDashboardInfo = new com.linkedin.dashboard.DashboardInfo(dataMap); - dashboard.setInfo(mapInfo(gmsDashboardInfo, entityUrn)); - dashboard.setProperties(mapDashboardInfoToProperties(gmsDashboardInfo, entityUrn)); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to deprecated GraphQL {@link DashboardInfo} - */ - private DashboardInfo mapInfo(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardInfo result = new DashboardInfo(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - result.setCharts(info.getCharts().stream().map(urn -> { - final Chart chart = new Chart(); - chart.setUrn(urn.toString()); - return chart; - 
}).collect(Collectors.toList())); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. - result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); } - - /** - * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link DashboardProperties} - */ - private DashboardProperties mapDashboardInfoToProperties(final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { - final DashboardProperties result = new DashboardProperties(); - result.setDescription(info.getDescription()); - result.setName(info.getTitle()); - result.setLastRefreshed(info.getLastRefreshed()); - - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } else if (info.hasDashboardUrl()) { - // TODO: Migrate to using the External URL field for consistency. 
- result.setExternalUrl(info.getDashboardUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - if (info.hasAccess()) { - result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); - } - result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); - result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); - if (info.getLastModified().hasDeleted()) { - result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); - } - return result; + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); } - - private void mapEditableDashboardProperties(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(dataMap); - final DashboardEditableProperties dashboardEditableProperties = new DashboardEditableProperties(); - dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); - dashboard.setEditableProperties(dashboardEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.dashboard.DashboardInfo} to new GraphQL {@link + * DashboardProperties} + */ + private DashboardProperties mapDashboardInfoToProperties( + final com.linkedin.dashboard.DashboardInfo info, Urn entityUrn) { + final DashboardProperties result = new DashboardProperties(); + result.setDescription(info.getDescription()); + result.setName(info.getTitle()); + result.setLastRefreshed(info.getLastRefreshed()); + + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } else if (info.hasDashboardUrl()) { + // TODO: 
Migrate to using the External URL field for consistency. + result.setExternalUrl(info.getDashboardUrl().toString()); } - - private void mapGlobalTags(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dashboard.setGlobalTags(globalTags); - dashboard.setTags(globalTags); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dashboard.setContainer(Container - .builder() + if (info.hasAccess()) { + result.setAccess(AccessLevel.valueOf(info.getAccess().toString())); + } + result.setLastModified(AuditStampMapper.map(info.getLastModified().getLastModified())); + result.setCreated(AuditStampMapper.map(info.getLastModified().getCreated())); + if (info.getLastModified().hasDeleted()) { + result.setDeleted(AuditStampMapper.map(info.getLastModified().getDeleted())); + } + return result; + } + + private void mapEditableDashboardProperties( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(dataMap); + final DashboardEditableProperties dashboardEditableProperties = + new DashboardEditableProperties(); + dashboardEditableProperties.setDescription(editableDashboardProperties.getDescription()); + dashboard.setEditableProperties(dashboardEditableProperties); + } + + private void mapGlobalTags( + @Nonnull Dashboard dashboard, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dashboard.setGlobalTags(globalTags); + 
dashboard.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dashboard.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); - } + private void mapDomains(@Nonnull Dashboard dashboard, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dashboard.setDomain(DomainAssociationMapper.map(domains, dashboard.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java index f084dbc0bc09f3..6212663ee87e4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dashboard.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,67 +19,65 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; +public class DashboardUpdateInputMapper + implements InputModelMapper, Urn> { + public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); + public static Collection map( + @Nonnull final DashboardUpdateInput 
dashboardUpdateInput, @Nonnull final Urn actor) { + return INSTANCE.apply(dashboardUpdateInput, actor); + } -public class DashboardUpdateInputMapper implements - InputModelMapper, Urn> { - public static final DashboardUpdateInputMapper INSTANCE = new DashboardUpdateInputMapper(); - - public static Collection map(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dashboardUpdateInput, actor); - } + @Override + public Collection apply( + @Nonnull final DashboardUpdateInput dashboardUpdateInput, @Nonnull final Urn actor) { - @Override - public Collection apply(@Nonnull final DashboardUpdateInput dashboardUpdateInput, - @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - final Collection proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DASHBOARD_ENTITY_NAME); - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); - - if (dashboardUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } - - if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dashboardUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dashboardUpdateInput.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } else { - // Tags override global tags - globalTags.setTags( - 
new TagAssociationArray( - dashboardUpdateInput.getTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element) - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dashboardUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dashboardUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dashboardUpdateInput.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); - editableDashboardProperties.setDescription(dashboardUpdateInput.getEditableProperties().getDescription()); - if (!editableDashboardProperties.hasCreated()) { - editableDashboardProperties.setCreated(auditStamp); - } - editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); - } + if (dashboardUpdateInput.getTags() != null || dashboardUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (dashboardUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } else { + // Tags override global tags + globalTags.setTags( + new TagAssociationArray( + dashboardUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dashboardUpdateInput.getEditableProperties() != null) { + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); + 
editableDashboardProperties.setDescription( + dashboardUpdateInput.getEditableProperties().getDescription()); + if (!editableDashboardProperties.hasCreated()) { + editableDashboardProperties.setCreated(auditStamp); + } + editableDashboardProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_DASHBOARD_PROPERTIES_ASPECT_NAME)); } + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java index d257aef4be565e..782ec3d3a6c073 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/mappers/DashboardUsageMetricMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class DashboardUsageMetricMapper implements TimeSeriesAspectMapper { public static final DashboardUsageMetricMapper INSTANCE = new DashboardUsageMetricMapper(); @@ -18,8 +17,10 @@ public static DashboardUsageMetrics map(@Nonnull final EnvelopedAspect enveloped @Override public DashboardUsageMetrics apply(EnvelopedAspect envelopedAspect) { com.linkedin.dashboard.DashboardUsageStatistics gmsDashboardUsageStatistics = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), com.linkedin.dashboard.DashboardUsageStatistics.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + com.linkedin.dashboard.DashboardUsageStatistics.class); final com.linkedin.datahub.graphql.generated.DashboardUsageMetrics dashboardUsageMetrics = new 
com.linkedin.datahub.graphql.generated.DashboardUsageMetrics(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 54f7660064c051..6ec1979cd090d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataflow; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; 
import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataFlowType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_FLOW_KEY_ASPECT_NAME, + DATA_FLOW_INFO_ASPECT_NAME, + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); + private final EntityClient _entityClient; -public class DataFlowType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_FLOW_KEY_ASPECT_NAME, - DATA_FLOW_INFO_ASPECT_NAME, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("orchestrator", "cluster"); - private final EntityClient _entityClient; - - public DataFlowType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataFlowType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_FLOW; - } + @Override + public EntityType type() { + return EntityType.DATA_FLOW; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return 
Entity::getUrn; + } - @Override - public Class objectClass() { - return DataFlow.class; - } + @Override + public Class objectClass() { + return DataFlow.class; + } - @Override - public Class inputClass() { - return DataFlowUpdateInput.class; - } - - @Override - public List> batchLoad(final List urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dataFlowMap = - _entityClient.batchGetV2( - Constants.DATA_FLOW_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataFlowMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataFlow -> gmsDataFlow == null ? null : DataFetcherResult.newResult() - .data(DataFlowMapper.map(gmsDataFlow)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Flows", e); - } - } + @Override + public Class inputClass() { + return DataFlowUpdateInput.class; + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("dataFlow", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List> batchLoad( + final List urnStrs, @Nonnull final QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dataFlowMap = + _entityClient.batchGetV2( + Constants.DATA_FLOW_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + 
context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataFlowMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataFlow -> + gmsDataFlow == null + ? null + : DataFetcherResult.newResult() + .data(DataFlowMapper.map(gmsDataFlow)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Flows", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataFlow", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataFlowUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataFlow", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataFlow update(@Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) throws Exception { + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataFlow", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DataFlowUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + DataFlowUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } + @Override + public DataFlow update( + @Nonnull String urn, @Nonnull DataFlowUpdateInput input, @Nonnull QueryContext context) + throws Exception { - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DataFlowUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. 
- final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataFlowUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_FLOW_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataFlowUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - 
specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java index 719fa9f0b2bf01..165fae81527ab8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -17,12 +19,12 @@ import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -38,120 +40,147 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import 
static com.linkedin.metadata.Constants.*; - - public class DataFlowMapper implements ModelMapper { - public static final DataFlowMapper INSTANCE = new DataFlowMapper(); - - public static DataFlow map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public DataFlow apply(@Nonnull final EntityResponse entityResponse) { - final DataFlow result = new DataFlow(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_FLOW); - Urn entityUrn = entityResponse.getUrn(); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); - mappingHelper.mapToResult(DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataFlow, dataMap) -> + public static final DataFlowMapper INSTANCE = new DataFlowMapper(); + + public static DataFlow map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataFlow apply(@Nonnull final EntityResponse entityResponse) { + final DataFlow result = new DataFlow(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_FLOW); + Urn entityUrn = entityResponse.getUrn(); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATA_FLOW_KEY_ASPECT_NAME, this::mapKey); + mappingHelper.mapToResult( + 
DATA_FLOW_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataFlow, dataMap) -> - dataFlow.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataFlow, dataMap) -> this.mapGlobalTags(dataFlow, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataFlow, dataMap) -> + dataFlow.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setDeprecation(DeprecationMapper.map(new 
Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataFlow, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataFlow, dataMap) -> dataFlow.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } + return mappingHelper.getResult(); + } - private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final DataFlowKey gmsKey = new DataFlowKey(dataMap); - dataFlow.setOrchestrator(gmsKey.getOrchestrator()); - dataFlow.setFlowId(gmsKey.getFlowId()); - dataFlow.setCluster(gmsKey.getCluster()); - dataFlow.setPlatform(DataPlatform.builder() + private void mapKey(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final DataFlowKey gmsKey = new DataFlowKey(dataMap); + dataFlow.setOrchestrator(gmsKey.getOrchestrator()); + dataFlow.setFlowId(gmsKey.getFlowId()); + dataFlow.setCluster(gmsKey.getCluster()); + dataFlow.setPlatform( + DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(EntityKeyUtils - .convertEntityKeyToUrn(new DataPlatformKey() - .setPlatformName(gmsKey.getOrchestrator()), DATA_PLATFORM_ENTITY_NAME).toString()).build()); - } - - private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = new com.linkedin.datajob.DataFlowInfo(dataMap); - dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); - dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); - } - - /** - * Maps GMS {@link 
com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} - */ - private DataFlowInfo mapDataFlowInfo(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowInfo result = new DataFlowInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + .setUrn( + EntityKeyUtils.convertEntityKeyToUrn( + new DataPlatformKey().setPlatformName(gmsKey.getOrchestrator()), + DATA_PLATFORM_ENTITY_NAME) + .toString()) + .build()); + } + + private void mapInfo(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.datajob.DataFlowInfo gmsDataFlowInfo = + new com.linkedin.datajob.DataFlowInfo(dataMap); + dataFlow.setInfo(mapDataFlowInfo(gmsDataFlowInfo, entityUrn)); + dataFlow.setProperties(mapDataFlowInfoToProperties(gmsDataFlowInfo, entityUrn)); + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to deprecated GraphQL {@link DataFlowInfo} + */ + private DataFlowInfo mapDataFlowInfo( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowInfo result = new DataFlowInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} - */ - private DataFlowProperties mapDataFlowInfoToProperties(final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { - final DataFlowProperties result = new DataFlowProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - 
result.setProject(info.getProject()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(dataMap); - final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); - dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); - dataFlow.setEditableProperties(dataFlowEditableProperties); + return result; + } + + /** + * Maps GMS {@link com.linkedin.datajob.DataFlowInfo} to new GraphQL {@link DataFlowProperties} + */ + private DataFlowProperties mapDataFlowInfoToProperties( + final com.linkedin.datajob.DataFlowInfo info, Urn entityUrn) { + final DataFlowProperties result = new DataFlowProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + result.setProject(info.getProject()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - private void mapGlobalTags(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataFlow.setGlobalTags(globalTags); - dataFlow.setTags(globalTags); - } - - private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } + return result; + } + + private void mapEditableProperties(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(dataMap); + final DataFlowEditableProperties dataFlowEditableProperties = new DataFlowEditableProperties(); + dataFlowEditableProperties.setDescription(editableDataFlowProperties.getDescription()); + dataFlow.setEditableProperties(dataFlowEditableProperties); + } + + private void mapGlobalTags( + @Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataFlow.setGlobalTags(globalTags); + dataFlow.setTags(globalTags); + } + + private void mapDomains(@Nonnull DataFlow dataFlow, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ dataFlow.setDomain(DomainAssociationMapper.map(domains, dataFlow.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java index c966fc8338ed49..87579a15d586e2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/mappers/DataFlowUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataflow.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,22 +19,18 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DataFlowUpdateInputMapper implements InputModelMapper, Urn> { +public class DataFlowUpdateInputMapper + implements InputModelMapper, Urn> { public static final DataFlowUpdateInputMapper INSTANCE = new DataFlowUpdateInputMapper(); - public static Collection map(@Nonnull final DataFlowUpdateInput dataFlowUpdateInput, - @Nonnull final Urn actor) { + public static Collection map( + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(dataFlowUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DataFlowUpdateInput dataFlowUpdateInput, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(3); final AuditStamp auditStamp = new AuditStamp(); auditStamp.setActor(actor, SetMode.IGNORE_NULL); @@ -41,7 +39,8 @@ public Collection apply( if (dataFlowUpdateInput.getOwnership() != 
null) { proposals.add( - updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dataFlowUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } @@ -50,28 +49,29 @@ public Collection apply( if (dataFlowUpdateInput.getGlobalTags() != null) { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getGlobalTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); } else { globalTags.setTags( new TagAssociationArray( - dataFlowUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); + dataFlowUpdateInput.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (dataFlowUpdateInput.getEditableProperties() != null) { - final EditableDataFlowProperties editableDataFlowProperties = new EditableDataFlowProperties(); - editableDataFlowProperties.setDescription(dataFlowUpdateInput.getEditableProperties().getDescription()); + final EditableDataFlowProperties editableDataFlowProperties = + new EditableDataFlowProperties(); + editableDataFlowProperties.setDescription( + dataFlowUpdateInput.getEditableProperties().getDescription()); editableDataFlowProperties.setCreated(auditStamp); editableDataFlowProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataFlowProperties, - EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataFlowProperties, EDITABLE_DATA_FLOW_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index f6f37978bb36aa..6e71584007504b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.datajob; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -37,8 +40,8 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -54,178 +57,201 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static 
com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; +public class DataJobType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATA_JOB_KEY_ASPECT_NAME, + DATA_JOB_INFO_ASPECT_NAME, + DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, + EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + private static final Set FACET_FIELDS = ImmutableSet.of("flow"); + private final EntityClient _entityClient; -public class DataJobType implements SearchableEntityType, BrowsableEntityType, - MutableType { - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATA_JOB_KEY_ASPECT_NAME, - DATA_JOB_INFO_ASPECT_NAME, - DATA_JOB_INPUT_OUTPUT_ASPECT_NAME, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - private static final Set FACET_FIELDS = ImmutableSet.of("flow"); - private final EntityClient _entityClient; - - public DataJobType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataJobType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.DATA_JOB; - } + @Override + public EntityType type() { + return EntityType.DATA_JOB; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function 
getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return DataJob.class; - } + @Override + public Class objectClass() { + return DataJob.class; + } - @Override - public Class inputClass() { - return DataJobUpdateInput.class; - } + @Override + public Class inputClass() { + return DataJobUpdateInput.class; + } - @Override - public List> batchLoad(final List urnStrs, @Nonnull final QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - try { - final Map dataJobMap = _entityClient.batchGetV2( - Constants.DATA_JOB_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(dataJobMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataJob -> gmsDataJob == null ? null : DataFetcherResult.newResult() - .data(DataJobMapper.map(gmsDataJob)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Jobs", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "dataJob", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public List> batchLoad( + final List urnStrs, @Nonnull final QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + try { + final Map dataJobMap = + _entityClient.batchGetV2( + Constants.DATA_JOB_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + 
context.getAuthentication()); - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(dataJobMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataJob -> + gmsDataJob == null + ? null + : DataFetcherResult.newResult() + .data(DataJobMapper.map(gmsDataJob)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Jobs", e); } + } - @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( "dataJob", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("dataJob", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } - @Override - public DataJob update(@Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DataJobUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - 
throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataJob", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DataJobUrn.createFromString(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public DataJob update( + @Nonnull String urn, @Nonnull DataJobUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DataJobUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - private boolean isAuthorized(@Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. 
- final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DataJobUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. + final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATA_JOB_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DataJobUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - 
specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); + } + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); + } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java index 61802ad9cfe5c1..0d0e7a613c8d81 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -21,13 +23,13 @@ import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.FineGrainedLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -42,143 +44,164 @@ import 
java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataJobMapper implements ModelMapper { - public static final DataJobMapper INSTANCE = new DataJobMapper(); + public static final DataJobMapper INSTANCE = new DataJobMapper(); - public static DataJob map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static DataJob map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public DataJob apply(@Nonnull final EntityResponse entityResponse) { - final DataJob result = new DataJob(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public DataJob apply(@Nonnull final EntityResponse entityResponse) { + final DataJob result = new DataJob(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_JOB); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_JOB); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - entityResponse.getAspects().forEach((name, aspect) -> { - DataMap data = aspect.getValue().data(); - if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { + entityResponse + .getAspects() + .forEach( + (name, aspect) -> { + DataMap data = aspect.getValue().data(); + if (DATA_JOB_KEY_ASPECT_NAME.equals(name)) { final DataJobKey gmsKey = new DataJobKey(data); - result.setDataFlow(new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); + result.setDataFlow( + new DataFlow.Builder().setUrn(gmsKey.getFlow().toString()).build()); result.setJobId(gmsKey.getJobId()); - } else if 
(DATA_JOB_INFO_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = new com.linkedin.datajob.DataJobInfo(data); + } else if (DATA_JOB_INFO_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInfo gmsDataJobInfo = + new com.linkedin.datajob.DataJobInfo(data); result.setInfo(mapDataJobInfo(gmsDataJobInfo, entityUrn)); result.setProperties(mapDataJobInfoToProperties(gmsDataJobInfo, entityUrn)); - } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { - final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = new com.linkedin.datajob.DataJobInputOutput(data); + } else if (DATA_JOB_INPUT_OUTPUT_ASPECT_NAME.equals(name)) { + final com.linkedin.datajob.DataJobInputOutput gmsDataJobInputOutput = + new com.linkedin.datajob.DataJobInputOutput(data); result.setInputOutput(mapDataJobInputOutput(gmsDataJobInputOutput)); - } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(data); - final DataJobEditableProperties dataJobEditableProperties = new DataJobEditableProperties(); - dataJobEditableProperties.setDescription(editableDataJobProperties.getDescription()); + } else if (EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME.equals(name)) { + final EditableDataJobProperties editableDataJobProperties = + new EditableDataJobProperties(data); + final DataJobEditableProperties dataJobEditableProperties = + new DataJobEditableProperties(); + dataJobEditableProperties.setDescription( + editableDataJobProperties.getDescription()); result.setEditableProperties(dataJobEditableProperties); - } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { + } else if (OWNERSHIP_ASPECT_NAME.equals(name)) { result.setOwnership(OwnershipMapper.map(new Ownership(data), entityUrn)); - } else if (STATUS_ASPECT_NAME.equals(name)) { + } else if (STATUS_ASPECT_NAME.equals(name)) { result.setStatus(StatusMapper.map(new Status(data))); - } else if 
(GLOBAL_TAGS_ASPECT_NAME.equals(name)) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(data), entityUrn); + } else if (GLOBAL_TAGS_ASPECT_NAME.equals(name)) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(data), entityUrn); result.setGlobalTags(globalTags); result.setTags(globalTags); - } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); - } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { - result.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); - } else if (DOMAINS_ASPECT_NAME.equals(name)) { + } else if (INSTITUTIONAL_MEMORY_ASPECT_NAME.equals(name)) { + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(data), entityUrn)); + } else if (GLOSSARY_TERMS_ASPECT_NAME.equals(name)) { + result.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(data), entityUrn)); + } else if (DOMAINS_ASPECT_NAME.equals(name)) { final Domains domains = new Domains(data); // Currently we only take the first domain if it exists. 
result.setDomain(DomainAssociationMapper.map(domains, entityUrn.toString())); - } else if (DEPRECATION_ASPECT_NAME.equals(name)) { + } else if (DEPRECATION_ASPECT_NAME.equals(name)) { result.setDeprecation(DeprecationMapper.map(new Deprecation(data))); - } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { - result.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); - } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { + } else if (DATA_PLATFORM_INSTANCE_ASPECT_NAME.equals(name)) { + result.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(data))); + } else if (BROWSE_PATHS_V2_ASPECT_NAME.equals(name)) { result.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(data))); - } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { + } else if (SUB_TYPES_ASPECT_NAME.equals(name)) { result.setSubTypes(SubTypesMapper.map(new SubTypes(data))); - } - }); - - return result; + } + }); + + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} */ + private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobInfo result = new DataJobInfo(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to deprecated GraphQL {@link DataJobInfo} - */ - private DataJobInfo mapDataJobInfo(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobInfo result = new DataJobInfo(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - 
return result; + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); } - - /** - * Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} - */ - private DataJobProperties mapDataJobInfoToProperties(final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { - final DataJobProperties result = new DataJobProperties(); - result.setName(info.getName()); - result.setDescription(info.getDescription()); - if (info.hasExternalUrl()) { - result.setExternalUrl(info.getExternalUrl().toString()); - } - if (info.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); - } - return result; + return result; + } + + /** Maps GMS {@link com.linkedin.datajob.DataJobInfo} to new GraphQL {@link DataJobProperties} */ + private DataJobProperties mapDataJobInfoToProperties( + final com.linkedin.datajob.DataJobInfo info, Urn entityUrn) { + final DataJobProperties result = new DataJobProperties(); + result.setName(info.getName()); + result.setDescription(info.getDescription()); + if (info.hasExternalUrl()) { + result.setExternalUrl(info.getExternalUrl().toString()); + } + if (info.hasCustomProperties()) { + result.setCustomProperties(CustomPropertiesMapper.map(info.getCustomProperties(), entityUrn)); + } + return result; + } + + private DataJobInputOutput mapDataJobInputOutput( + final com.linkedin.datajob.DataJobInputOutput inputOutput) { + final DataJobInputOutput result = new DataJobInputOutput(); + if (inputOutput.hasInputDatasets()) { + result.setInputDatasets( + inputOutput.getInputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatasets(ImmutableList.of()); + } + if (inputOutput.hasOutputDatasets()) { + result.setOutputDatasets( + 
inputOutput.getOutputDatasets().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) + .collect(Collectors.toList())); + } else { + result.setOutputDatasets(ImmutableList.of()); + } + if (inputOutput.hasInputDatajobs()) { + result.setInputDatajobs( + inputOutput.getInputDatajobs().stream() + .map( + urn -> { + final DataJob dataJob = new DataJob(); + dataJob.setUrn(urn.toString()); + return dataJob; + }) + .collect(Collectors.toList())); + } else { + result.setInputDatajobs(ImmutableList.of()); } - private DataJobInputOutput mapDataJobInputOutput(final com.linkedin.datajob.DataJobInputOutput inputOutput) { - final DataJobInputOutput result = new DataJobInputOutput(); - if (inputOutput.hasInputDatasets()) { - result.setInputDatasets(inputOutput.getInputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setInputDatasets(ImmutableList.of()); - } - if (inputOutput.hasOutputDatasets()) { - result.setOutputDatasets(inputOutput.getOutputDatasets().stream().map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }).collect(Collectors.toList())); - } else { - result.setOutputDatasets(ImmutableList.of()); - } - if (inputOutput.hasInputDatajobs()) { - result.setInputDatajobs(inputOutput.getInputDatajobs().stream().map(urn -> { - final DataJob dataJob = new DataJob(); - dataJob.setUrn(urn.toString()); - return dataJob; - }).collect(Collectors.toList())); - } else { - result.setInputDatajobs(ImmutableList.of()); - } - - if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { - result.setFineGrainedLineages(FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); - } - - return result; + if (inputOutput.hasFineGrainedLineages() && inputOutput.getFineGrainedLineages() != null) { + 
result.setFineGrainedLineages( + FineGrainedLineagesMapper.map(inputOutput.getFineGrainedLineages())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java index b075c42d411fb3..b0f299e00b4bae 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/mappers/DataJobUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.datajob.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,63 +19,61 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DataJobUpdateInputMapper implements InputModelMapper, Urn> { - public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); +public class DataJobUpdateInputMapper + implements InputModelMapper, Urn> { + public static final DataJobUpdateInputMapper INSTANCE = new DataJobUpdateInputMapper(); - public static Collection map( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - return INSTANCE.apply(dataJobUpdateInput, actor); - } - - @Override - public Collection apply( - @Nonnull final DataJobUpdateInput dataJobUpdateInput, - @Nonnull final Urn actor) { - final Collection proposals = new ArrayList<>(3); - final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); + public static Collection map( + @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + return 
INSTANCE.apply(dataJobUpdateInput, actor); + } - final AuditStamp auditStamp = new AuditStamp(); - auditStamp.setActor(actor, SetMode.IGNORE_NULL); - auditStamp.setTime(System.currentTimeMillis()); + @Override + public Collection apply( + @Nonnull final DataJobUpdateInput dataJobUpdateInput, @Nonnull final Urn actor) { + final Collection proposals = new ArrayList<>(3); + final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATA_JOB_ENTITY_NAME); - if (dataJobUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); - } + final AuditStamp auditStamp = new AuditStamp(); + auditStamp.setActor(actor, SetMode.IGNORE_NULL); + auditStamp.setTime(System.currentTimeMillis()); - if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { - final GlobalTags globalTags = new GlobalTags(); - if (dataJobUpdateInput.getGlobalTags() != null) { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getGlobalTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } else { - globalTags.setTags( - new TagAssociationArray( - dataJobUpdateInput.getTags().getTags().stream().map(TagAssociationUpdateMapper::map - ).collect(Collectors.toList()) - ) - ); - } - proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); - } + if (dataJobUpdateInput.getOwnership() != null) { + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(dataJobUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); + } - if (dataJobUpdateInput.getEditableProperties() != null) { - final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); - editableDataJobProperties.setDescription(dataJobUpdateInput.getEditableProperties().getDescription()); - 
editableDataJobProperties.setCreated(auditStamp); - editableDataJobProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDataJobProperties, - EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); - } + if (dataJobUpdateInput.getTags() != null || dataJobUpdateInput.getGlobalTags() != null) { + final GlobalTags globalTags = new GlobalTags(); + if (dataJobUpdateInput.getGlobalTags() != null) { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getGlobalTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); + } else { + globalTags.setTags( + new TagAssociationArray( + dataJobUpdateInput.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); + } + proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); + } - return proposals; + if (dataJobUpdateInput.getEditableProperties() != null) { + final EditableDataJobProperties editableDataJobProperties = new EditableDataJobProperties(); + editableDataJobProperties.setDescription( + dataJobUpdateInput.getEditableProperties().getDescription()); + editableDataJobProperties.setCreated(auditStamp); + editableDataJobProperties.setLastModified(auditStamp); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDataJobProperties, EDITABLE_DATA_JOB_PROPERTIES_ASPECT_NAME)); } + + return proposals; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java index 57a035d136645c..567d275dbee0a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/DataPlatformType.java @@ -1,5 +1,7 @@ package 
com.linkedin.datahub.graphql.types.dataplatform; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -17,56 +19,60 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformType implements EntityType { - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public DataPlatformType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public DataPlatformType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public Class objectClass() { - return DataPlatform.class; - } + @Override + public Class objectClass() { + return DataPlatform.class; + } - @Override - public List> batchLoad(final List urns, final QueryContext context) { + @Override + public List> batchLoad( + final List urns, final QueryContext context) { - final List dataPlatformUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + final List dataPlatformUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map dataPlatformMap = _entityClient.batchGetV2( - DATA_PLATFORM_ENTITY_NAME, new HashSet<>(dataPlatformUrns), null, context.getAuthentication()); + try { + final Map dataPlatformMap = + _entityClient.batchGetV2( + DATA_PLATFORM_ENTITY_NAME, + new HashSet<>(dataPlatformUrns), + null, + context.getAuthentication()); - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformUrns) { - gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformUrns) { + gmsResults.add(dataPlatformMap.getOrDefault(urn, null)); + } - return gmsResults.stream() - .map(gmsPlatform -> gmsPlatform == null ? 
null - : DataFetcherResult.newResult() - .data(DataPlatformMapper.map(gmsPlatform)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Data Platforms", e); - } + return gmsResults.stream() + .map( + gmsPlatform -> + gmsPlatform == null + ? null + : DataFetcherResult.newResult() + .data(DataPlatformMapper.map(gmsPlatform)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Data Platforms", e); } + } - @Override - public com.linkedin.datahub.graphql.generated.EntityType type() { - return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; - } + @Override + public com.linkedin.datahub.graphql.generated.EntityType type() { + return com.linkedin.datahub.graphql.generated.EntityType.DATA_PLATFORM; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java index 011fb83cddb334..c2dc3bfabd07c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformInfoMapper.java @@ -6,25 +6,27 @@ import javax.annotation.Nonnull; @Deprecated -public class DataPlatformInfoMapper implements ModelMapper { +public class DataPlatformInfoMapper + implements ModelMapper { - public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); + public static final DataPlatformInfoMapper INSTANCE = new DataPlatformInfoMapper(); - public static DataPlatformInfo map(@Nonnull final 
com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); - } + public static DataPlatformInfo map( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return INSTANCE.apply(platform); + } - @Override - public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformInfo result = new DataPlatformInfo(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.hasDisplayName()) { - result.setDisplayName(input.getDisplayName()); - } - if (input.hasLogoUrl()) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + @Override + public DataPlatformInfo apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformInfo result = new DataPlatformInfo(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.hasDisplayName()) { + result.setDisplayName(input.getDisplayName()); + } + if (input.hasLogoUrl()) { + result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java index 8df44e8f6e9e98..f7078f9f37d7c6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatform.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.DataPlatform; import 
com.linkedin.datahub.graphql.generated.EntityType; @@ -13,36 +15,40 @@ import com.linkedin.metadata.utils.EntityKeyUtils; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataPlatformMapper implements ModelMapper { - public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); - - public static DataPlatform map(@Nonnull final EntityResponse platform) { - return INSTANCE.apply(platform); - } - - @Override - public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { - final DataPlatform result = new DataPlatform(); - final DataPlatformKey dataPlatformKey = (DataPlatformKey) EntityKeyUtils.convertUrnToEntityKeyInternal(entityResponse.getUrn(), - new DataPlatformKey().schema()); - result.setType(EntityType.DATA_PLATFORM); - Urn urn = entityResponse.getUrn(); - result.setUrn(urn.toString()); - result.setName(dataPlatformKey.getPlatformName()); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PLATFORM_KEY_ASPECT_NAME, (dataPlatform, dataMap) -> + public static final DataPlatformMapper INSTANCE = new DataPlatformMapper(); + + public static DataPlatform map(@Nonnull final EntityResponse platform) { + return INSTANCE.apply(platform); + } + + @Override + public DataPlatform apply(@Nonnull final EntityResponse entityResponse) { + final DataPlatform result = new DataPlatform(); + final DataPlatformKey dataPlatformKey = + (DataPlatformKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + entityResponse.getUrn(), new DataPlatformKey().schema()); + result.setType(EntityType.DATA_PLATFORM); + Urn urn = entityResponse.getUrn(); + result.setUrn(urn.toString()); + result.setName(dataPlatformKey.getPlatformName()); + + EnvelopedAspectMap aspectMap = 
entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PLATFORM_KEY_ASPECT_NAME, + (dataPlatform, dataMap) -> dataPlatform.setName(new DataPlatformKey(dataMap).getPlatformName())); - mappingHelper.mapToResult(DATA_PLATFORM_INFO_ASPECT_NAME, (dataPlatform, dataMap) -> - dataPlatform.setProperties(DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + DATA_PLATFORM_INFO_ASPECT_NAME, + (dataPlatform, dataMap) -> + dataPlatform.setProperties( + DataPlatformPropertiesMapper.map(new DataPlatformInfo(dataMap)))); + return mappingHelper.getResult(); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java index c0a236dc1a4025..ad6de5505bed6b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatform/mappers/DataPlatformPropertiesMapper.java @@ -5,27 +5,28 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; +public class DataPlatformPropertiesMapper + implements ModelMapper { -public class DataPlatformPropertiesMapper implements ModelMapper { + public static final DataPlatformPropertiesMapper INSTANCE = new DataPlatformPropertiesMapper(); - public static final DataPlatformPropertiesMapper - INSTANCE = new DataPlatformPropertiesMapper(); + public static DataPlatformProperties map( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { + return 
INSTANCE.apply(platform); + } - public static DataPlatformProperties map(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo platform) { - return INSTANCE.apply(platform); + @Override + public DataPlatformProperties apply( + @Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { + final DataPlatformProperties result = new DataPlatformProperties(); + result.setType(PlatformType.valueOf(input.getType().toString())); + result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); + if (input.getDisplayName() != null) { + result.setDisplayName(input.getDisplayName()); } - - @Override - public DataPlatformProperties apply(@Nonnull final com.linkedin.dataplatform.DataPlatformInfo input) { - final DataPlatformProperties result = new DataPlatformProperties(); - result.setType(PlatformType.valueOf(input.getType().toString())); - result.setDatasetNameDelimiter(input.getDatasetNameDelimiter()); - if (input.getDisplayName() != null) { - result.setDisplayName(input.getDisplayName()); - } - if (input.getLogoUrl() != null) { - result.setLogoUrl(input.getLogoUrl().toString()); - } - return result; + if (input.getLogoUrl() != null) { + result.setLogoUrl(input.getLogoUrl().toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java index 87614e13325283..6519a493f39917 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; + import 
com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,19 +12,15 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; +import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.datahub.graphql.types.dataplatforminstance.mappers.DataPlatformInstanceMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; -import com.linkedin.datahub.graphql.types.SearchableEntityType; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,90 +28,100 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; -import static com.linkedin.metadata.Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME; - -public class DataPlatformInstanceType implements SearchableEntityType, +public class DataPlatformInstanceType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - Constants.DEPRECATION_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - Constants.GLOBAL_TAGS_ASPECT_NAME, - Constants.STATUS_ASPECT_NAME - ); - private 
final EntityClient _entityClient; - - public DataPlatformInstanceType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.DATA_PLATFORM_INSTANCE; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return DataPlatformInstance.class; + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + Constants.DEPRECATION_ASPECT_NAME, + Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + Constants.GLOBAL_TAGS_ASPECT_NAME, + Constants.STATUS_ASPECT_NAME); + private final EntityClient _entityClient; + + public DataPlatformInstanceType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.DATA_PLATFORM_INSTANCE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return DataPlatformInstance.class; + } + + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataPlatformInstanceUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map entities = + _entityClient.batchGetV2( + Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, + new HashSet<>(dataPlatformInstanceUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : dataPlatformInstanceUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataPlatformInstanceMapper.map(gmsResult)) + .build()) + .collect(Collectors.toList()); + + } catch (Exception e) { + throw new RuntimeException("Failed to batch load DataPlatformInstance", e); } - - @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List dataPlatformInstanceUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map entities = _entityClient.batchGetV2( - Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME, - new HashSet<>(dataPlatformInstanceUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : dataPlatformInstanceUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(DataPlatformInstanceMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - - } catch (Exception e) { - throw new RuntimeException("Failed to batch load DataPlatformInstance", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, - filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final 
QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on DataPlatformInstance entity type"); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PLATFORM_INSTANCE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java index ba49f23133f9e2..1a2bd0488c4bd6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataplatforminstance/mappers/DataPlatformInstanceMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataplatforminstance.mappers; -import com.linkedin.common.Ownership; +import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; +import com.linkedin.common.Ownership; import com.linkedin.common.Status; -import com.linkedin.common.Deprecation; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; -import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; -import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import 
com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; -import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.DataPlatformInstance; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; +import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; +import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; +import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; +import com.linkedin.dataplatforminstance.DataPlatformInstanceProperties; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DataPlatformInstanceKey; - import javax.annotation.Nonnull; public class DataPlatformInstanceMapper { @@ -41,65 +40,75 @@ public DataPlatformInstance apply(@Nonnull final EntityResponse entityResponse) final EnvelopedAspectMap aspects = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspects, result); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - this::mapDataPlatformInstanceKey - ); - mappingHelper.mapToResult(Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.OWNERSHIP_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn)) - ); - 
mappingHelper.mapToResult(Constants.GLOBAL_TAGS_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn) - ); - mappingHelper.mapToResult(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn)) - ); - mappingHelper.mapToResult(Constants.STATUS_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap))) - ); - mappingHelper.mapToResult(Constants.DEPRECATION_ASPECT_NAME, - (dataPlatformInstance, dataMap) -> - dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap))) - ); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, this::mapDataPlatformInstanceKey); + mappingHelper.mapToResult( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapDataPlatformInstanceProperties(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.OWNERSHIP_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setOwnership( + OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.GLOBAL_TAGS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + this.mapGlobalTags(dataPlatformInstance, dataMap, entityUrn)); + mappingHelper.mapToResult( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + Constants.STATUS_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + dataPlatformInstance.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + Constants.DEPRECATION_ASPECT_NAME, + (dataPlatformInstance, dataMap) -> + 
dataPlatformInstance.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } - private void mapDataPlatformInstanceKey(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { + private void mapDataPlatformInstanceKey( + @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap) { final DataPlatformInstanceKey gmsKey = new DataPlatformInstanceKey(dataMap); - dataPlatformInstance.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()) - .build()); + dataPlatformInstance.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); dataPlatformInstance.setInstanceId(gmsKey.getInstance()); } private void mapDataPlatformInstanceProperties( - @Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn - ) { - final DataPlatformInstanceProperties gmsProperties = new DataPlatformInstanceProperties(dataMap); + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull Urn entityUrn) { + final DataPlatformInstanceProperties gmsProperties = + new DataPlatformInstanceProperties(dataMap); final com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties properties = - new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); + new com.linkedin.datahub.graphql.generated.DataPlatformInstanceProperties(); properties.setName(gmsProperties.getName()); properties.setDescription(gmsProperties.getDescription()); if (gmsProperties.hasExternalUrl()) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } if (gmsProperties.hasCustomProperties()) { - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), 
entityUrn)); } dataPlatformInstance.setProperties(properties); } - private void mapGlobalTags(@Nonnull DataPlatformInstance dataPlatformInstance, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull DataPlatformInstance dataPlatformInstance, + @Nonnull DataMap dataMap, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataPlatformInstance.setTags(globalTags); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java index ee014f9f665719..48a0cb984862d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataprocessinst.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.DataProcessInstance; @@ -12,43 +14,43 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class DataProcessInstanceMapper implements ModelMapper { - public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); - - public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final DataProcessInstanceMapper INSTANCE = new DataProcessInstanceMapper(); + + public static DataProcessInstance map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { + final DataProcessInstance result = new DataProcessInstance(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATA_PROCESS_INSTANCE); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); + + return mappingHelper.getResult(); + } + + private void mapDataProcessProperties( + @Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { + DataProcessInstanceProperties dataProcessInstanceProperties = + new DataProcessInstanceProperties(dataMap); + dpi.setName(dataProcessInstanceProperties.getName()); + if (dataProcessInstanceProperties.hasCreated()) { + dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); } - - @Override - public DataProcessInstance apply(@Nonnull final EntityResponse entityResponse) { - final DataProcessInstance result = new DataProcessInstance(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATA_PROCESS_INSTANCE); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - 
mappingHelper.mapToResult(DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME, this::mapDataProcessProperties); - - return mappingHelper.getResult(); - } - - private void mapDataProcessProperties(@Nonnull DataProcessInstance dpi, @Nonnull DataMap dataMap) { - DataProcessInstanceProperties dataProcessInstanceProperties = new DataProcessInstanceProperties(dataMap); - dpi.setName(dataProcessInstanceProperties.getName()); - if (dataProcessInstanceProperties.hasCreated()) { - dpi.setCreated(AuditStampMapper.map(dataProcessInstanceProperties.getCreated())); - } - if (dataProcessInstanceProperties.hasExternalUrl()) { - dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); - } + if (dataProcessInstanceProperties.hasExternalUrl()) { + dpi.setExternalUrl(dataProcessInstanceProperties.getExternalUrl().toString()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java index ca9a77f7e45cb8..fd60711e8c569d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunEventMapper.java @@ -6,36 +6,41 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - -public class DataProcessInstanceRunEventMapper implements TimeSeriesAspectMapper { - - public static final DataProcessInstanceRunEventMapper INSTANCE = new DataProcessInstanceRunEventMapper(); - - public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map(@Nonnull final EnvelopedAspect envelopedAspect) { - return INSTANCE.apply(envelopedAspect); +public class DataProcessInstanceRunEventMapper + implements 
TimeSeriesAspectMapper { + + public static final DataProcessInstanceRunEventMapper INSTANCE = + new DataProcessInstanceRunEventMapper(); + + public static com.linkedin.datahub.graphql.generated.DataProcessRunEvent map( + @Nonnull final EnvelopedAspect envelopedAspect) { + return INSTANCE.apply(envelopedAspect); + } + + @Override + public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply( + @Nonnull final EnvelopedAspect envelopedAspect) { + + DataProcessInstanceRunEvent runEvent = + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + DataProcessInstanceRunEvent.class); + + final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = + new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); + + result.setTimestampMillis(runEvent.getTimestampMillis()); + result.setAttempt(runEvent.getAttempt()); + if (runEvent.hasStatus()) { + result.setStatus( + com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf( + runEvent.getStatus().toString())); } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessRunEvent apply(@Nonnull final EnvelopedAspect envelopedAspect) { - - DataProcessInstanceRunEvent runEvent = GenericRecordUtils - .deserializeAspect( - envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), - DataProcessInstanceRunEvent.class); - - final com.linkedin.datahub.graphql.generated.DataProcessRunEvent result = - new com.linkedin.datahub.graphql.generated.DataProcessRunEvent(); - - result.setTimestampMillis(runEvent.getTimestampMillis()); - result.setAttempt(runEvent.getAttempt()); - if (runEvent.hasStatus()) { - result.setStatus(com.linkedin.datahub.graphql.generated.DataProcessRunStatus.valueOf(runEvent.getStatus().toString())); - } - if (runEvent.hasResult()) { - result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); - } - - return result; + if 
(runEvent.hasResult()) { + result.setResult(DataProcessInstanceRunResultMapper.map(runEvent.getResult())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java index 91b03eea2745f2..422bea73925a85 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataprocessinst/mappers/DataProcessInstanceRunResultMapper.java @@ -5,30 +5,34 @@ import com.linkedin.dataprocess.DataProcessInstanceRunResult; import javax.annotation.Nonnull; +public class DataProcessInstanceRunResultMapper + implements ModelMapper< + DataProcessInstanceRunResult, + com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { -public class DataProcessInstanceRunResultMapper implements ModelMapper< - DataProcessInstanceRunResult, com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult> { + public static final DataProcessInstanceRunResultMapper INSTANCE = + new DataProcessInstanceRunResultMapper(); - public static final DataProcessInstanceRunResultMapper INSTANCE = new DataProcessInstanceRunResultMapper(); + public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map( + @Nonnull final DataProcessInstanceRunResult input) { + return INSTANCE.apply(input); + } - public static com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult map(@Nonnull final DataProcessInstanceRunResult input) { - return INSTANCE.apply(input); - } - - @Override - public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply(@Nonnull final DataProcessInstanceRunResult input) { - - final 
com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = - new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); + @Override + public com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult apply( + @Nonnull final DataProcessInstanceRunResult input) { - if (input.hasType()) { - result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); - } + final com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult result = + new com.linkedin.datahub.graphql.generated.DataProcessInstanceRunResult(); - if (input.hasNativeResultType()) { - result.setNativeResultType(input.getNativeResultType()); - } + if (input.hasType()) { + result.setResultType(DataProcessInstanceRunResultType.valueOf(input.getType().toString())); + } - return result; + if (input.hasNativeResultType()) { + result.setNativeResultType(input.getNativeResultType()); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java index eb8ca23f00b37a..766f6937ce3e29 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/DataProductType.java @@ -1,5 +1,13 @@ package com.linkedin.datahub.graphql.types.dataproduct; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static 
com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,11 +26,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import lombok.RequiredArgsConstructor; -import org.apache.commons.lang3.NotImplementedException; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -30,26 +33,23 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; - -import static com.linkedin.metadata.Constants.DATA_PRODUCT_ENTITY_NAME; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import lombok.RequiredArgsConstructor; +import org.apache.commons.lang3.NotImplementedException; @RequiredArgsConstructor -public class DataProductType implements SearchableEntityType, - com.linkedin.datahub.graphql.types.EntityType { - public static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - DATA_PRODUCT_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME - ); +public class DataProductType + implements SearchableEntityType, + com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + 
DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -68,13 +68,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { - final List dataProductUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List dataProductUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATA_PRODUCT_ENTITY_NAME, new HashSet<>(dataProductUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATA_PRODUCT_ENTITY_NAME, + new HashSet<>(dataProductUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -82,8 +86,13 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataProductMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DataProductMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); @@ -91,22 +100,28 @@ public List> batchLoad(@Nonnull List urns } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + DATA_PRODUCT_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Data Product entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Data Product entity type"); } - } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java index 254b43ecb96ccb..8039ea08dc722c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataproduct/mappers/DataProductMapper.java @@ -1,5 +1,12 @@ 
package com.linkedin.datahub.graphql.types.dataproduct.mappers; +import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; +import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; +import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -21,17 +28,8 @@ import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.DATA_PRODUCT_PROPERTIES_ASPECT_NAME; -import static com.linkedin.metadata.Constants.DOMAINS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOBAL_TAGS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERMS_ASPECT_NAME; -import static com.linkedin.metadata.Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - - public class DataProductMapper implements ModelMapper { public static final DataProductMapper INSTANCE = new DataProductMapper(); @@ -50,27 +48,44 @@ public DataProduct apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATA_PRODUCT_PROPERTIES_ASPECT_NAME, (dataProduct, dataMap) -> - mapDataProductProperties(dataProduct, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - 
mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setDomain(DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataProduct, dataMap) -> - dataProduct.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + DATA_PRODUCT_PROPERTIES_ASPECT_NAME, + (dataProduct, dataMap) -> mapDataProductProperties(dataProduct, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + DOMAINS_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setDomain( + DomainAssociationMapper.map(new Domains(dataMap), dataProduct.getUrn()))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataProduct, dataMap) -> + dataProduct.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); return result; } - private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull DataMap dataMap, @Nonnull Urn urn) { + private void mapDataProductProperties( + @Nonnull DataProduct dataProduct, 
@Nonnull DataMap dataMap, @Nonnull Urn urn) { DataProductProperties dataProductProperties = new DataProductProperties(dataMap); - com.linkedin.datahub.graphql.generated.DataProductProperties properties = new com.linkedin.datahub.graphql.generated.DataProductProperties(); + com.linkedin.datahub.graphql.generated.DataProductProperties properties = + new com.linkedin.datahub.graphql.generated.DataProductProperties(); - final String name = dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); + final String name = + dataProductProperties.hasName() ? dataProductProperties.getName() : urn.getId(); properties.setName(name); properties.setDescription(dataProductProperties.getDescription()); if (dataProductProperties.hasExternalUrl()) { @@ -81,7 +96,9 @@ private void mapDataProductProperties(@Nonnull DataProduct dataProduct, @Nonnull } else { properties.setNumAssets(0); } - properties.setCustomProperties(CustomPropertiesMapper.map(dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); + properties.setCustomProperties( + CustomPropertiesMapper.map( + dataProductProperties.getCustomProperties(), UrnUtils.getUrn(dataProduct.getUrn()))); dataProduct.setProperties(properties); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 6f339d3985133f..badb24810c82bf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import 
com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -8,19 +13,17 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; -import com.linkedin.datahub.graphql.generated.Dataset; -import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; -import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.generated.BrowsePath; +import com.linkedin.datahub.graphql.generated.BrowseResults; +import com.linkedin.datahub.graphql.generated.Dataset; +import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.generated.Entity; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.SearchResults; -import com.linkedin.datahub.graphql.generated.BatchDatasetUpdateInput; import com.linkedin.datahub.graphql.resolvers.ResolverUtils; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.BrowsableEntityType; @@ -37,13 +40,12 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import 
com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -56,235 +58,266 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetType implements SearchableEntityType, BrowsableEntityType, +public class DatasetType + implements SearchableEntityType, + BrowsableEntityType, BatchMutableType { - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - DATA_PLATFORM_INSTANCE_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - EMBED_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME, - ACCESS_DATASET_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME - ); - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private static final String ENTITY_NAME = "dataset"; - - private final EntityClient _entityClient; - - public DatasetType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return Dataset.class; + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + 
DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. + DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + EMBED_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME, + ACCESS_DATASET_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME); + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private static final String ENTITY_NAME = "dataset"; + + private final EntityClient _entityClient; + + public DatasetType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return Dataset.class; + } + + @Override + public Class inputClass() { + return DatasetUpdateInput.class; + } + + @Override + public Class batchInputClass() { + return BatchDatasetUpdateInput[].class; + } + + @Override + public EntityType type() { + return EntityType.DATASET; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + @Nonnull final List urnStrs, @Nonnull final QueryContext context) { + try { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + final Map datasetMap = + _entityClient.batchGetV2( + Constants.DATASET_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : urns) { + gmsResults.add(datasetMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.newResult() + .data(DatasetMapper.map(gmsDataset)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Datasets", e); } - - @Override - public Class inputClass() { - return DatasetUpdateInput.class; + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "dataset", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } + + @Override + public List batchUpdate( + @Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { + final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); + + final Collection proposals = + Arrays.stream(input) + .map( + updateInput -> { + if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { + Collection datasetProposals = + DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); + datasetProposals.forEach( + proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); + return datasetProposals; + } + throw new AuthorizationException( + "Unauthorized to perform this action. 
Please contact your DataHub administrator."); + }) + .flatMap(Collection::stream) + .collect(Collectors.toList()); + + final List urns = + Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } - @Override - public Class batchInputClass() { - return BatchDatasetUpdateInput[].class; + return batchLoad(urns, context).stream() + .map(DataFetcherResult::getData) + .collect(Collectors.toList()); + } + + @Override + public Dataset update( + @Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(urn, input, context)) { + final CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = + DatasetUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - @Override - public EntityType type() { - return EntityType.DATASET; + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized( + @Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), + urn, + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getInstitutionalMemory() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - @Override - public List> batchLoad(@Nonnull final List urnStrs, - @Nonnull final QueryContext context) { - try { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - final Map datasetMap = - _entityClient.batchGetV2( - Constants.DATASET_ENTITY_NAME, - new HashSet<>(urns), - ASPECTS_TO_RESOLVE, - context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : urns) { - gmsResults.add(datasetMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsDataset -> gmsDataset == null ? 
null : DataFetcherResult.newResult() - .data(DatasetMapper.map(gmsDataset)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Datasets", e); - } + if (updateInput.getDeprecation() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search(ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); + if (updateInput.getEditableProperties() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(ENTITY_NAME, query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); + if (updateInput.getGlobalTags() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "dataset", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); + if (updateInput.getEditableSchemaMetadata() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); + specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); } - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(DatasetUtils.getDatasetUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - @Override - public List batchUpdate(@Nonnull BatchDatasetUpdateInput[] input, @Nonnull QueryContext context) throws Exception { - final Urn actor = Urn.createFromString(context.getAuthentication().getActor().toUrnStr()); - - final Collection proposals = Arrays.stream(input).map(updateInput -> { - if (isAuthorized(updateInput.getUrn(), updateInput.getUpdate(), context)) { - Collection datasetProposals = DatasetUpdateInputMapper.map(updateInput.getUpdate(), actor); - datasetProposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(updateInput.getUrn()))); - return datasetProposals; - } - throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); - }).flatMap(Collection::stream).collect(Collectors.toList()); - - final List urns = Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); - } - - return batchLoad(urns, context).stream().map(DataFetcherResult::getData).collect(Collectors.toList()); - } - - @Override - public Dataset update(@Nonnull String urn, @Nonnull DatasetUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(urn, input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = DatasetUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); - } - - private boolean isAuthorized(@Nonnull String urn, @Nonnull DatasetUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. 
- final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.DATASET_PRIVILEGES.getResourceType(), - urn, - orPrivilegeGroups); - } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final DatasetUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getInstitutionalMemory() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOC_LINKS_PRIVILEGE.getType()); - } - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDeprecation() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_STATUS_PRIVILEGE.getType()); - } - if (updateInput.getEditableProperties() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_DOCS_PRIVILEGE.getType()); - } - if (updateInput.getGlobalTags() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); - } - if (updateInput.getEditableSchemaMetadata() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_TAGS_PRIVILEGE.getType()); - specificPrivileges.add(PoliciesConfig.EDIT_DATASET_COL_DESCRIPTION_PRIVILEGE.getType()); - } - - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); - } + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java index e1aa580276a504..676617bfa2f90d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetUtils.java @@ -1,18 +1,18 @@ package com.linkedin.datahub.graphql.types.dataset; import com.linkedin.common.urn.DatasetUrn; - import java.net.URISyntaxException; public class DatasetUtils { - private DatasetUtils() { } + private DatasetUtils() {} - static DatasetUrn getDatasetUrn(String urnStr) { - try { - return DatasetUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); - } + static DatasetUrn getDatasetUrn(String urnStr) { + try { + return DatasetUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve dataset with urn %s, invalid urn", urnStr)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java index e620bfb30b6b7e..df019cc5df8fed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/VersionedDatasetType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import 
com.linkedin.common.VersionedUrn; import com.linkedin.common.urn.Urn; @@ -22,32 +24,30 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class VersionedDatasetType implements com.linkedin.datahub.graphql.types.EntityType { +public class VersionedDatasetType + implements com.linkedin.datahub.graphql.types.EntityType { - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - DATASET_KEY_ASPECT_NAME, - DATASET_PROPERTIES_ASPECT_NAME, - EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, - DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. - DEPRECATION_ASPECT_NAME, - DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, - UPSTREAM_LINEAGE_ASPECT_NAME, - EDITABLE_SCHEMA_METADATA_ASPECT_NAME, - VIEW_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - STATUS_ASPECT_NAME, - CONTAINER_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SCHEMA_METADATA_ASPECT_NAME, - SIBLINGS_ASPECT_NAME, - DATA_PRODUCTS_ASPECT_NAME - ); + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + DATASET_KEY_ASPECT_NAME, + DATASET_PROPERTIES_ASPECT_NAME, + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, + DATASET_DEPRECATION_ASPECT_NAME, // This aspect is deprecated. 
+ DEPRECATION_ASPECT_NAME, + DATASET_UPSTREAM_LINEAGE_ASPECT_NAME, + UPSTREAM_LINEAGE_ASPECT_NAME, + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + VIEW_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + STATUS_ASPECT_NAME, + CONTAINER_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SCHEMA_METADATA_ASPECT_NAME, + SIBLINGS_ASPECT_NAME, + DATA_PRODUCTS_ASPECT_NAME); private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); private static final String ENTITY_NAME = "dataset"; @@ -74,8 +74,8 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(@Nonnull final List versionedUrns, - @Nonnull final QueryContext context) { + public List> batchLoad( + @Nonnull final List versionedUrns, @Nonnull final QueryContext context) { try { final Map datasetMap = _entityClient.batchGetVersionedV2( @@ -89,9 +89,13 @@ public List> batchLoad(@Nonnull final List gmsDataset == null ? null : DataFetcherResult.newResult() - .data(VersionedDatasetMapper.map(gmsDataset)) - .build()) + .map( + gmsDataset -> + gmsDataset == null + ? 
null + : DataFetcherResult.newResult() + .data(VersionedDatasetMapper.map(gmsDataset)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Datasets", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java index 0ec9bed0c85117..5fe7815ea2f8d3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/AssertionRunEventMapper.java @@ -13,7 +13,6 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import javax.annotation.Nonnull; - public class AssertionRunEventMapper implements TimeSeriesAspectMapper { @@ -29,8 +28,10 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( @Nonnull final EnvelopedAspect envelopedAspect) { AssertionRunEvent gmsAssertionRunEvent = - GenericRecordUtils.deserializeAspect(envelopedAspect.getAspect().getValue(), - envelopedAspect.getAspect().getContentType(), AssertionRunEvent.class); + GenericRecordUtils.deserializeAspect( + envelopedAspect.getAspect().getValue(), + envelopedAspect.getAspect().getContentType(), + AssertionRunEvent.class); final com.linkedin.datahub.graphql.generated.AssertionRunEvent assertionRunEvent = new com.linkedin.datahub.graphql.generated.AssertionRunEvent(); @@ -39,7 +40,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setAssertionUrn(gmsAssertionRunEvent.getAssertionUrn().toString()); assertionRunEvent.setAsserteeUrn(gmsAssertionRunEvent.getAsserteeUrn().toString()); assertionRunEvent.setRunId(gmsAssertionRunEvent.getRunId()); - assertionRunEvent.setStatus(AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); + 
assertionRunEvent.setStatus( + AssertionRunStatus.valueOf(gmsAssertionRunEvent.getStatus().name())); if (gmsAssertionRunEvent.hasBatchSpec()) { assertionRunEvent.setBatchSpec(mapBatchSpec(gmsAssertionRunEvent.getBatchSpec())); } @@ -50,7 +52,8 @@ public com.linkedin.datahub.graphql.generated.AssertionRunEvent apply( assertionRunEvent.setResult(mapAssertionResult(gmsAssertionRunEvent.getResult())); } if (gmsAssertionRunEvent.hasRuntimeContext()) { - assertionRunEvent.setRuntimeContext(StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); + assertionRunEvent.setRuntimeContext( + StringMapMapper.map(gmsAssertionRunEvent.getRuntimeContext())); } return assertionRunEvent; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java index 1adcea7e53dc27..1644e0243a1812 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetDeprecationMapper.java @@ -2,24 +2,25 @@ import com.linkedin.datahub.graphql.generated.Deprecation; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class DatasetDeprecationMapper implements ModelMapper { +public class DatasetDeprecationMapper + implements ModelMapper { - public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); + public static final DatasetDeprecationMapper INSTANCE = new DatasetDeprecationMapper(); - public static Deprecation map(@Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { - return INSTANCE.apply(deprecation); - } + public static Deprecation map( + @Nonnull final com.linkedin.dataset.DatasetDeprecation deprecation) { + return INSTANCE.apply(deprecation); + } - 
@Override - public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { - final Deprecation result = new Deprecation(); - result.setActor(input.getActor().toString()); - result.setDeprecated(input.isDeprecated()); - result.setDecommissionTime(input.getDecommissionTime()); - result.setNote(input.getNote()); - return result; - } + @Override + public Deprecation apply(@Nonnull final com.linkedin.dataset.DatasetDeprecation input) { + final Deprecation result = new Deprecation(); + result.setActor(input.getActor().toString()); + result.setDeprecated(input.isDeprecated()); + result.setDecommissionTime(input.getDecommissionTime()); + result.setNote(input.getNote()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index 3e39c14c29ede1..7fa1decdf7f552 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Access; import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; @@ -15,6 +17,7 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.DataPlatform; import com.linkedin.datahub.graphql.generated.Dataset; @@ -22,6 +25,7 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import 
com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.EmbedMapper; @@ -29,15 +33,14 @@ import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.SubTypesMapper; import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.AccessMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; @@ -53,155 +56,198 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ @Slf4j public class DatasetMapper implements ModelMapper { - public static final DatasetMapper INSTANCE = new DatasetMapper(); - - public static Dataset map(@Nonnull final EntityResponse dataset) { - return INSTANCE.apply(dataset); - } - - public Dataset apply(@Nonnull final EntityResponse entityResponse) { - Dataset result = new Dataset(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.DATASET); - - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); - - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + public static final DatasetMapper INSTANCE = new DatasetMapper(); + + public static Dataset map(@Nonnull final EntityResponse dataset) { + return INSTANCE.apply(dataset); + } + + public Dataset apply(@Nonnull final EntityResponse entityResponse) { + Dataset result = new Dataset(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.DATASET); + + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); + + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( 
+ DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); - mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new 
GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(SIBLINGS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); - mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); - mappingHelper.mapToResult(EMBED_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (dataset, dataMap) -> + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, 
dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + SIBLINGS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + mappingHelper.mapToResult( + UPSTREAM_LINEAGE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setFineGrainedLineages( + UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); + mappingHelper.mapToResult( + EMBED_ASPECT_NAME, + (dataset, dataMap) -> dataset.setEmbed(EmbedMapper.map(new Embed(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (dataset, dataMap) -> dataset.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - mappingHelper.mapToResult(ACCESS_DATASET_ASPECT_NAME, ((dataset, dataMap) -> - dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); - mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, (dashboard, dataMap) -> - dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final DatasetKey gmsKey = new DatasetKey(dataMap); - dataset.setName(gmsKey.getName()); - dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() + mappingHelper.mapToResult( + ACCESS_DATASET_ASPECT_NAME, + ((dataset, dataMap) -> + dataset.setAccess(AccessMapper.map(new Access(dataMap), entityUrn)))); + mappingHelper.mapToResult( + SUB_TYPES_ASPECT_NAME, + (dashboard, dataMap) -> dashboard.setSubTypes(SubTypesMapper.map(new SubTypes(dataMap)))); + return mappingHelper.getResult(); + } + + private void mapDatasetKey(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final DatasetKey gmsKey = new DatasetKey(dataMap); + dataset.setName(gmsKey.getName()); + dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); + dataset.setPlatform( + 
DataPlatform.builder() .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + .setUrn(gmsKey.getPlatform().toString()) + .build()); + } + + private void mapDatasetProperties( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + final DatasetProperties gmsProperties = new DatasetProperties(dataMap); + final com.linkedin.datahub.graphql.generated.DatasetProperties properties = + new com.linkedin.datahub.graphql.generated.DatasetProperties(); + properties.setDescription(gmsProperties.getDescription()); + dataset.setDescription(gmsProperties.getDescription()); + properties.setOrigin(dataset.getOrigin()); + if (gmsProperties.getExternalUrl() != null) { + properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - - private void mapDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - final DatasetProperties gmsProperties = new DatasetProperties(dataMap); - final com.linkedin.datahub.graphql.generated.DatasetProperties properties = - new com.linkedin.datahub.graphql.generated.DatasetProperties(); - properties.setDescription(gmsProperties.getDescription()); - dataset.setDescription(gmsProperties.getDescription()); - properties.setOrigin(dataset.getOrigin()); - if (gmsProperties.getExternalUrl() != null) { - properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); - } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); - if (gmsProperties.getName() != null) { - properties.setName(gmsProperties.getName()); - } else { - properties.setName(dataset.getName()); - } - properties.setQualifiedName(gmsProperties.getQualifiedName()); - dataset.setProperties(properties); - dataset.setDescription(properties.getDescription()); - if (gmsProperties.getUri() != null) { - dataset.setUri(gmsProperties.getUri().toString()); - } - TimeStamp created = gmsProperties.getCreated(); - if (created != null) { 
- properties.setCreated(created.getTime()); - if (created.hasActor()) { - properties.setCreatedActor(created.getActor().toString()); - } - } - TimeStamp lastModified = gmsProperties.getLastModified(); - if (lastModified != null) { - properties.setLastModified(lastModified.getTime()); - if (lastModified.hasActor()) { - properties.setLastModifiedActor(lastModified.getActor().toString()); - } - } + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + if (gmsProperties.getName() != null) { + properties.setName(gmsProperties.getName()); + } else { + properties.setName(dataset.getName()); } - - private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); - final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); - editableProperties.setDescription(editableDatasetProperties.getDescription()); - dataset.setEditableProperties(editableProperties); + properties.setQualifiedName(gmsProperties.getQualifiedName()); + dataset.setProperties(properties); + dataset.setDescription(properties.getDescription()); + if (gmsProperties.getUri() != null) { + dataset.setUri(gmsProperties.getUri().toString()); } - - private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final ViewProperties properties = new ViewProperties(dataMap); - final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = - new com.linkedin.datahub.graphql.generated.ViewProperties(); - graphqlProperties.setMaterialized(properties.isMaterialized()); - graphqlProperties.setLanguage(properties.getViewLanguage()); - graphqlProperties.setLogic(properties.getViewLogic()); - dataset.setViewProperties(graphqlProperties); + TimeStamp created = gmsProperties.getCreated(); + if (created != null) { + properties.setCreated(created.getTime()); + if 
(created.hasActor()) { + properties.setCreatedActor(created.getActor().toString()); + } } - - private void mapGlobalTags(@Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); - dataset.setGlobalTags(globalTags); - dataset.setTags(globalTags); + TimeStamp lastModified = gmsProperties.getLastModified(); + if (lastModified != null) { + Urn actor = lastModified.getActor(); + properties.setLastModified( + new AuditStamp(lastModified.getTime(), actor == null ? null : actor.toString())); + properties.setLastModifiedActor(actor == null ? null : actor.toString()); + } else { + properties.setLastModified(new AuditStamp(0L, null)); } - - private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() + } + + private void mapEditableDatasetProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); + final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); + editableProperties.setDescription(editableDatasetProperties.getDescription()); + dataset.setEditableProperties(editableProperties); + } + + private void mapViewProperties(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final ViewProperties properties = new ViewProperties(dataMap); + final com.linkedin.datahub.graphql.generated.ViewProperties graphqlProperties = + new com.linkedin.datahub.graphql.generated.ViewProperties(); + graphqlProperties.setMaterialized(properties.isMaterialized()); + graphqlProperties.setLanguage(properties.getViewLanguage()); + graphqlProperties.setLogic(properties.getViewLogic()); + dataset.setViewProperties(graphqlProperties); + } + + 
private void mapGlobalTags( + @Nonnull Dataset dataset, @Nonnull DataMap dataMap, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + dataset.setGlobalTags(globalTags); + dataset.setTags(globalTags); + } + + private void mapContainers(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() .setType(EntityType.CONTAINER) .setUrn(gmsContainer.getContainer().toString()) .build()); - } + } - private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); - } + private void mapDomains(@Nonnull Dataset dataset, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + dataset.setDomain(DomainAssociationMapper.map(domains, dataset.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java index dbaaf27a3f2bc9..25639e431fac12 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapper.java @@ -8,20 +8,22 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class DatasetProfileMapper implements TimeSeriesAspectMapper { +public class DatasetProfileMapper + implements TimeSeriesAspectMapper { public static final DatasetProfileMapper INSTANCE = new DatasetProfileMapper(); - public static com.linkedin.datahub.graphql.generated.DatasetProfile map(@Nonnull final 
EnvelopedAspect envelopedAspect) { + public static com.linkedin.datahub.graphql.generated.DatasetProfile map( + @Nonnull final EnvelopedAspect envelopedAspect) { return INSTANCE.apply(envelopedAspect); } @Override - public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull final EnvelopedAspect envelopedAspect) { + public com.linkedin.datahub.graphql.generated.DatasetProfile apply( + @Nonnull final EnvelopedAspect envelopedAspect) { - DatasetProfile gmsProfile = GenericRecordUtils - .deserializeAspect( + DatasetProfile gmsProfile = + GenericRecordUtils.deserializeAspect( envelopedAspect.getAspect().getValue(), envelopedAspect.getAspect().getContentType(), DatasetProfile.class); @@ -35,13 +37,16 @@ public com.linkedin.datahub.graphql.generated.DatasetProfile apply(@Nonnull fina result.setTimestampMillis(gmsProfile.getTimestampMillis()); if (gmsProfile.hasFieldProfiles()) { result.setFieldProfiles( - gmsProfile.getFieldProfiles().stream().map(DatasetProfileMapper::mapFieldProfile).collect(Collectors.toList())); + gmsProfile.getFieldProfiles().stream() + .map(DatasetProfileMapper::mapFieldProfile) + .collect(Collectors.toList())); } return result; } - private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile(DatasetFieldProfile gmsProfile) { + private static com.linkedin.datahub.graphql.generated.DatasetFieldProfile mapFieldProfile( + DatasetFieldProfile gmsProfile) { final com.linkedin.datahub.graphql.generated.DatasetFieldProfile result = new com.linkedin.datahub.graphql.generated.DatasetFieldProfile(); result.setFieldPath(gmsProfile.getFieldPath()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java index 78c1299ed9bd9a..0b05d420030b5e 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -22,23 +24,19 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class DatasetUpdateInputMapper implements InputModelMapper, Urn> { +public class DatasetUpdateInputMapper + implements InputModelMapper, Urn> { public static final DatasetUpdateInputMapper INSTANCE = new DatasetUpdateInputMapper(); public static Collection map( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(datasetUpdateInput, actor); } @Override public Collection apply( - @Nonnull final DatasetUpdateInput datasetUpdateInput, - @Nonnull final Urn actor) { + @Nonnull final DatasetUpdateInput datasetUpdateInput, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(6); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(DATASET_ENTITY_NAME); final AuditStamp auditStamp = new AuditStamp(); @@ -46,8 +44,10 @@ public Collection apply( auditStamp.setTime(System.currentTimeMillis()); if (datasetUpdateInput.getOwnership() != null) { - proposals.add(updateMappingHelper.aspectToProposal( - OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(datasetUpdateInput.getOwnership(), actor), + OWNERSHIP_ASPECT_NAME)); } if 
(datasetUpdateInput.getDeprecation() != null) { @@ -58,29 +58,32 @@ public Collection apply( } deprecation.setNote(datasetUpdateInput.getDeprecation().getNote()); deprecation.setActor(actor, SetMode.IGNORE_NULL); - proposals.add(updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(deprecation, DATASET_DEPRECATION_ASPECT_NAME)); } if (datasetUpdateInput.getInstitutionalMemory() != null) { - proposals.add(updateMappingHelper.aspectToProposal(InstitutionalMemoryUpdateMapper - .map(datasetUpdateInput.getInstitutionalMemory()), INSTITUTIONAL_MEMORY_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + InstitutionalMemoryUpdateMapper.map(datasetUpdateInput.getInstitutionalMemory()), + INSTITUTIONAL_MEMORY_ASPECT_NAME)); } if (datasetUpdateInput.getTags() != null || datasetUpdateInput.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); if (datasetUpdateInput.getGlobalTags() != null) { - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getGlobalTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } else { // Tags field overrides deprecated globalTags field - globalTags.setTags(new TagAssociationArray(datasetUpdateInput.getTags() - .getTags() - .stream() - .map(element -> TagAssociationUpdateMapper.map(element)) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + datasetUpdateInput.getTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); } proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } @@ -89,28 +92,32 @@ public Collection 
apply( final EditableSchemaMetadata editableSchemaMetadata = new EditableSchemaMetadata(); editableSchemaMetadata.setEditableSchemaFieldInfo( new EditableSchemaFieldInfoArray( - datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream().map( - element -> mapSchemaFieldInfo(element) - ).collect(Collectors.toList()))); + datasetUpdateInput.getEditableSchemaMetadata().getEditableSchemaFieldInfo().stream() + .map(element -> mapSchemaFieldInfo(element)) + .collect(Collectors.toList()))); editableSchemaMetadata.setLastModified(auditStamp); editableSchemaMetadata.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableSchemaMetadata, EDITABLE_SCHEMA_METADATA_ASPECT_NAME)); } if (datasetUpdateInput.getEditableProperties() != null) { final EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(); - editableDatasetProperties.setDescription(datasetUpdateInput.getEditableProperties().getDescription()); + editableDatasetProperties.setDescription( + datasetUpdateInput.getEditableProperties().getDescription()); editableDatasetProperties.setLastModified(auditStamp); editableDatasetProperties.setCreated(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDatasetProperties, EDITABLE_DATASET_PROPERTIES_ASPECT_NAME)); } return proposals; } private EditableSchemaFieldInfo mapSchemaFieldInfo( - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo - ) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfoUpdate schemaFieldInfo) { final EditableSchemaFieldInfo output = new EditableSchemaFieldInfo(); if (schemaFieldInfo.getDescription() != null) { @@ -120,11 +127,14 @@ private 
EditableSchemaFieldInfo mapSchemaFieldInfo( if (schemaFieldInfo.getGlobalTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(schemaFieldInfo.getGlobalTags().getTags().stream().map( - element -> TagAssociationUpdateMapper.map(element)).collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + schemaFieldInfo.getGlobalTags().getTags().stream() + .map(element -> TagAssociationUpdateMapper.map(element)) + .collect(Collectors.toList()))); output.setGlobalTags(globalTags); } return output; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java index 922574d5051d30..f54adbe8ba26c6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaFieldInfoMapper.java @@ -4,39 +4,34 @@ import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.schema.EditableSchemaFieldInfo; - import javax.annotation.Nonnull; - public class EditableSchemaFieldInfoMapper { - public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); + public static final EditableSchemaFieldInfoMapper INSTANCE = new EditableSchemaFieldInfoMapper(); - public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( - @Nonnull final EditableSchemaFieldInfo fieldInfo, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(fieldInfo, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo map( + @Nonnull final 
EditableSchemaFieldInfo fieldInfo, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(fieldInfo, entityUrn); + } - public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( - @Nonnull final EditableSchemaFieldInfo input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); - if (input.hasDescription()) { - result.setDescription((input.getDescription())); - } - if (input.hasFieldPath()) { - result.setFieldPath((input.getFieldPath())); - } - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - return result; + public com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo apply( + @Nonnull final EditableSchemaFieldInfo input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo result = + new com.linkedin.datahub.graphql.generated.EditableSchemaFieldInfo(); + if (input.hasDescription()) { + result.setDescription((input.getDescription())); + } + if (input.hasFieldPath()) { + result.setFieldPath((input.getFieldPath())); + } + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java index 376558d2fd18cb..3cf012a523d544 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/EditableSchemaMetadataMapper.java @@ -1,28 +1,27 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; -import com.linkedin.schema.EditableSchemaMetadata; import com.linkedin.common.urn.Urn; - -import javax.annotation.Nonnull; +import com.linkedin.schema.EditableSchemaMetadata; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class EditableSchemaMetadataMapper { - public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - - public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( - @Nonnull final EditableSchemaMetadata metadata, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(metadata, entityUrn); - } + public static final EditableSchemaMetadataMapper INSTANCE = new EditableSchemaMetadataMapper(); - public com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply(@Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); - result.setEditableSchemaFieldInfo(input.getEditableSchemaFieldInfo().stream().map(schemaField -> - EditableSchemaFieldInfoMapper.map(schemaField, entityUrn) - ).collect(Collectors.toList())); - return result; - } + public static com.linkedin.datahub.graphql.generated.EditableSchemaMetadata map( + @Nonnull final EditableSchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } + public 
com.linkedin.datahub.graphql.generated.EditableSchemaMetadata apply( + @Nonnull final EditableSchemaMetadata input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.EditableSchemaMetadata result = + new com.linkedin.datahub.graphql.generated.EditableSchemaMetadata(); + result.setEditableSchemaFieldInfo( + input.getEditableSchemaFieldInfo().stream() + .map(schemaField -> EditableSchemaFieldInfoMapper.map(schemaField, entityUrn)) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java index b76767fa5d0454..b99b243da5b94a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/ForeignKeyConstraintMapper.java @@ -5,14 +5,12 @@ import com.linkedin.datahub.graphql.generated.ForeignKeyConstraint; import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import lombok.extern.slf4j.Slf4j; - import java.util.stream.Collectors; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class ForeignKeyConstraintMapper { - private ForeignKeyConstraintMapper() { } + private ForeignKeyConstraintMapper() {} public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint constraint) { ForeignKeyConstraint result = new ForeignKeyConstraint(); @@ -22,15 +20,15 @@ public static ForeignKeyConstraint map(com.linkedin.schema.ForeignKeyConstraint } if (constraint.hasSourceFields()) { result.setSourceFields( - constraint.getSourceFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + 
constraint.getSourceFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } if (constraint.hasForeignFields()) { result.setForeignFields( - constraint.getForeignFields().stream().map( - schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn) - ).collect(Collectors.toList())); + constraint.getForeignFields().stream() + .map(schemaFieldUrn -> mapSchemaFieldEntity(schemaFieldUrn)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java index 515cba5e99c74e..dd345bebf657f9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/PlatformSchemaMapper.java @@ -5,63 +5,66 @@ import com.linkedin.datahub.graphql.generated.TableSchema; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.schema.SchemaMetadata; - import javax.annotation.Nonnull; -public class PlatformSchemaMapper implements ModelMapper { +public class PlatformSchemaMapper + implements ModelMapper { - public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); + public static final PlatformSchemaMapper INSTANCE = new PlatformSchemaMapper(); - public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { - return INSTANCE.apply(metadata); - } + public static PlatformSchema map(@Nonnull final SchemaMetadata.PlatformSchema metadata) { + return INSTANCE.apply(metadata); + } - @Override - public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { - Object result; - if (input.isSchemaless()) { - return null; - } else if (input.isPrestoDDL()) { - final TableSchema 
prestoSchema = new TableSchema(); - prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); - result = prestoSchema; - } else if (input.isOracleDDL()) { - final TableSchema oracleSchema = new TableSchema(); - oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); - result = oracleSchema; - } else if (input.isMySqlDDL()) { - final TableSchema mySqlSchema = new TableSchema(); - mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); - result = mySqlSchema; - } else if (input.isKafkaSchema()) { - final TableSchema kafkaSchema = new TableSchema(); - kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); - result = kafkaSchema; - } else if (input.isOrcSchema()) { - final TableSchema orcSchema = new TableSchema(); - orcSchema.setSchema(input.getOrcSchema().getSchema()); - result = orcSchema; - } else if (input.isBinaryJsonSchema()) { - final TableSchema binaryJsonSchema = new TableSchema(); - binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); - result = binaryJsonSchema; - } else if (input.isEspressoSchema()) { - final KeyValueSchema espressoSchema = new KeyValueSchema(); - espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); - espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); - result = espressoSchema; - } else if (input.isKeyValueSchema()) { - final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); - otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); - otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); - result = otherKeyValueSchema; - } else if (input.isOtherSchema()) { - final TableSchema otherTableSchema = new TableSchema(); - otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); - result = otherTableSchema; - } else { - throw new RuntimeException(String.format("Unrecognized platform schema type %s provided", input.memberType().getType().name())); - } - return (PlatformSchema) result; + @Override 
+ public PlatformSchema apply(@Nonnull final SchemaMetadata.PlatformSchema input) { + Object result; + if (input.isSchemaless()) { + return null; + } else if (input.isPrestoDDL()) { + final TableSchema prestoSchema = new TableSchema(); + prestoSchema.setSchema(input.getPrestoDDL().getRawSchema()); + result = prestoSchema; + } else if (input.isOracleDDL()) { + final TableSchema oracleSchema = new TableSchema(); + oracleSchema.setSchema(input.getOracleDDL().getTableSchema()); + result = oracleSchema; + } else if (input.isMySqlDDL()) { + final TableSchema mySqlSchema = new TableSchema(); + mySqlSchema.setSchema(input.getMySqlDDL().getTableSchema()); + result = mySqlSchema; + } else if (input.isKafkaSchema()) { + final TableSchema kafkaSchema = new TableSchema(); + kafkaSchema.setSchema(input.getKafkaSchema().getDocumentSchema()); + result = kafkaSchema; + } else if (input.isOrcSchema()) { + final TableSchema orcSchema = new TableSchema(); + orcSchema.setSchema(input.getOrcSchema().getSchema()); + result = orcSchema; + } else if (input.isBinaryJsonSchema()) { + final TableSchema binaryJsonSchema = new TableSchema(); + binaryJsonSchema.setSchema(input.getBinaryJsonSchema().getSchema()); + result = binaryJsonSchema; + } else if (input.isEspressoSchema()) { + final KeyValueSchema espressoSchema = new KeyValueSchema(); + espressoSchema.setKeySchema(input.getEspressoSchema().getTableSchema()); + espressoSchema.setValueSchema(input.getEspressoSchema().getDocumentSchema()); + result = espressoSchema; + } else if (input.isKeyValueSchema()) { + final KeyValueSchema otherKeyValueSchema = new KeyValueSchema(); + otherKeyValueSchema.setKeySchema(input.getKeyValueSchema().getKeySchema()); + otherKeyValueSchema.setValueSchema(input.getKeyValueSchema().getValueSchema()); + result = otherKeyValueSchema; + } else if (input.isOtherSchema()) { + final TableSchema otherTableSchema = new TableSchema(); + otherTableSchema.setSchema(input.getOtherSchema().getRawSchema()); + result = 
otherTableSchema; + } else { + throw new RuntimeException( + String.format( + "Unrecognized platform schema type %s provided", + input.memberType().getType().name())); } + return (PlatformSchema) result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java index f05a1adb6b443f..f53803ce5be855 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaFieldMapper.java @@ -3,72 +3,75 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.SchemaField; import com.linkedin.datahub.graphql.generated.SchemaFieldDataType; -import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; - +import com.linkedin.datahub.graphql.types.tag.mappers.GlobalTagsMapper; import javax.annotation.Nonnull; public class SchemaFieldMapper { - public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); + public static final SchemaFieldMapper INSTANCE = new SchemaFieldMapper(); - public static SchemaField map(@Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { - return INSTANCE.apply(metadata, entityUrn); - } + public static SchemaField map( + @Nonnull final com.linkedin.schema.SchemaField metadata, @Nonnull Urn entityUrn) { + return INSTANCE.apply(metadata, entityUrn); + } - public SchemaField apply(@Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { - final SchemaField result = new SchemaField(); - result.setDescription(input.getDescription()); - result.setFieldPath(input.getFieldPath()); - result.setJsonPath(input.getJsonPath()); - 
result.setRecursive(input.isRecursive()); - result.setNullable(input.isNullable()); - result.setNativeDataType(input.getNativeDataType()); - result.setType(mapSchemaFieldDataType(input.getType())); - result.setLabel(input.getLabel()); - if (input.hasGlobalTags()) { - result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); - } - if (input.hasGlossaryTerms()) { - result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); - } - result.setIsPartOfKey(input.isIsPartOfKey()); - result.setIsPartitioningKey(input.isIsPartitioningKey()); - return result; + public SchemaField apply( + @Nonnull final com.linkedin.schema.SchemaField input, @Nonnull Urn entityUrn) { + final SchemaField result = new SchemaField(); + result.setDescription(input.getDescription()); + result.setFieldPath(input.getFieldPath()); + result.setJsonPath(input.getJsonPath()); + result.setRecursive(input.isRecursive()); + result.setNullable(input.isNullable()); + result.setNativeDataType(input.getNativeDataType()); + result.setType(mapSchemaFieldDataType(input.getType())); + result.setLabel(input.getLabel()); + if (input.hasGlobalTags()) { + result.setGlobalTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + result.setTags(GlobalTagsMapper.map(input.getGlobalTags(), entityUrn)); + } + if (input.hasGlossaryTerms()) { + result.setGlossaryTerms(GlossaryTermsMapper.map(input.getGlossaryTerms(), entityUrn)); } + result.setIsPartOfKey(input.isIsPartOfKey()); + result.setIsPartitioningKey(input.isIsPartitioningKey()); + return result; + } - private SchemaFieldDataType mapSchemaFieldDataType(@Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { - final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); - if (type.isBytesType()) { - return SchemaFieldDataType.BYTES; - } else if (type.isFixedType()) { - return SchemaFieldDataType.FIXED; - } 
else if (type.isBooleanType()) { - return SchemaFieldDataType.BOOLEAN; - } else if (type.isStringType()) { - return SchemaFieldDataType.STRING; - } else if (type.isNumberType()) { - return SchemaFieldDataType.NUMBER; - } else if (type.isDateType()) { - return SchemaFieldDataType.DATE; - } else if (type.isTimeType()) { - return SchemaFieldDataType.TIME; - } else if (type.isEnumType()) { - return SchemaFieldDataType.ENUM; - } else if (type.isNullType()) { - return SchemaFieldDataType.NULL; - } else if (type.isArrayType()) { - return SchemaFieldDataType.ARRAY; - } else if (type.isMapType()) { - return SchemaFieldDataType.MAP; - } else if (type.isRecordType()) { - return SchemaFieldDataType.STRUCT; - } else if (type.isUnionType()) { - return SchemaFieldDataType.UNION; - } else { - throw new RuntimeException(String.format("Unrecognized SchemaFieldDataType provided %s", - type.memberType().toString())); - } + private SchemaFieldDataType mapSchemaFieldDataType( + @Nonnull final com.linkedin.schema.SchemaFieldDataType dataTypeUnion) { + final com.linkedin.schema.SchemaFieldDataType.Type type = dataTypeUnion.getType(); + if (type.isBytesType()) { + return SchemaFieldDataType.BYTES; + } else if (type.isFixedType()) { + return SchemaFieldDataType.FIXED; + } else if (type.isBooleanType()) { + return SchemaFieldDataType.BOOLEAN; + } else if (type.isStringType()) { + return SchemaFieldDataType.STRING; + } else if (type.isNumberType()) { + return SchemaFieldDataType.NUMBER; + } else if (type.isDateType()) { + return SchemaFieldDataType.DATE; + } else if (type.isTimeType()) { + return SchemaFieldDataType.TIME; + } else if (type.isEnumType()) { + return SchemaFieldDataType.ENUM; + } else if (type.isNullType()) { + return SchemaFieldDataType.NULL; + } else if (type.isArrayType()) { + return SchemaFieldDataType.ARRAY; + } else if (type.isMapType()) { + return SchemaFieldDataType.MAP; + } else if (type.isRecordType()) { + return SchemaFieldDataType.STRUCT; + } else if 
(type.isUnionType()) { + return SchemaFieldDataType.UNION; + } else { + throw new RuntimeException( + String.format( + "Unrecognized SchemaFieldDataType provided %s", type.memberType().toString())); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java index eb793cc17efb6b..d0424ba89eca1c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMapper.java @@ -4,44 +4,53 @@ import com.linkedin.datahub.graphql.generated.Schema; import com.linkedin.mxe.SystemMetadata; import com.linkedin.schema.SchemaMetadata; - +import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; -import java.util.stream.Collectors; public class SchemaMapper { - public static final SchemaMapper INSTANCE = new SchemaMapper(); + public static final SchemaMapper INSTANCE = new SchemaMapper(); - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, null, entityUrn); - } + public static Schema map(@Nonnull final SchemaMetadata metadata, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, null, entityUrn); + } - public static Schema map(@Nonnull final SchemaMetadata metadata, @Nullable final SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - return INSTANCE.apply(metadata, systemMetadata, entityUrn); - } + public static Schema map( + @Nonnull final SchemaMetadata metadata, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(metadata, systemMetadata, entityUrn); + } - public Schema apply(@Nonnull final com.linkedin.schema.SchemaMetadata input, @Nullable final 
SystemMetadata systemMetadata, @Nonnull final Urn entityUrn) { - final Schema result = new Schema(); - if (input.getDataset() != null) { - result.setDatasetUrn(input.getDataset().toString()); - } - if (systemMetadata != null) { - result.setLastObserved(systemMetadata.getLastObserved()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - if (input.getForeignKeys() != null) { - result.setForeignKeys(input.getForeignKeys().stream() - .map(ForeignKeyConstraintMapper::map) - .collect(Collectors.toList())); - } - return result; + public Schema apply( + @Nonnull final com.linkedin.schema.SchemaMetadata input, + @Nullable final SystemMetadata systemMetadata, + @Nonnull final Urn entityUrn) { + final Schema result = new Schema(); + if (input.getDataset() != null) { + result.setDatasetUrn(input.getDataset().toString()); + } + if (systemMetadata != null) { + result.setLastObserved(systemMetadata.getLastObserved()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + if (input.getForeignKeys() != null) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(ForeignKeyConstraintMapper::map) + 
.collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java index 00cb91bed8abb2..31381073a16dd0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/SchemaMetadataMapper.java @@ -6,43 +6,42 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class SchemaMetadataMapper { - public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); + public static final SchemaMetadataMapper INSTANCE = new SchemaMetadataMapper(); - public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(aspect, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.SchemaMetadata map( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(aspect, entityUrn); + } - public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( - @Nonnull final EnvelopedAspect aspect, - @Nonnull final Urn entityUrn - ) { - final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); - final com.linkedin.datahub.graphql.generated.SchemaMetadata result = - new com.linkedin.datahub.graphql.generated.SchemaMetadata(); + public com.linkedin.datahub.graphql.generated.SchemaMetadata apply( + @Nonnull final EnvelopedAspect aspect, @Nonnull final Urn entityUrn) { + final SchemaMetadata input = new SchemaMetadata(aspect.getValue().data()); + final com.linkedin.datahub.graphql.generated.SchemaMetadata result = + new com.linkedin.datahub.graphql.generated.SchemaMetadata(); - if 
(input.hasDataset()) { - result.setDatasetUrn(input.getDataset().toString()); - } - result.setName(input.getSchemaName()); - result.setPlatformUrn(input.getPlatform().toString()); - result.setVersion(input.getVersion()); - result.setCluster(input.getCluster()); - result.setHash(input.getHash()); - result.setPrimaryKeys(input.getPrimaryKeys()); - result.setFields(input.getFields().stream().map(field -> SchemaFieldMapper.map(field, entityUrn)).collect(Collectors.toList())); - result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); - result.setAspectVersion(aspect.getVersion()); - if (input.hasForeignKeys()) { - result.setForeignKeys(input.getForeignKeys().stream().map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map( - foreignKeyConstraint - )).collect(Collectors.toList())); - } - return result; + if (input.hasDataset()) { + result.setDatasetUrn(input.getDataset().toString()); + } + result.setName(input.getSchemaName()); + result.setPlatformUrn(input.getPlatform().toString()); + result.setVersion(input.getVersion()); + result.setCluster(input.getCluster()); + result.setHash(input.getHash()); + result.setPrimaryKeys(input.getPrimaryKeys()); + result.setFields( + input.getFields().stream() + .map(field -> SchemaFieldMapper.map(field, entityUrn)) + .collect(Collectors.toList())); + result.setPlatformSchema(PlatformSchemaMapper.map(input.getPlatformSchema())); + result.setAspectVersion(aspect.getVersion()); + if (input.hasForeignKeys()) { + result.setForeignKeys( + input.getForeignKeys().stream() + .map(foreignKeyConstraint -> ForeignKeyConstraintMapper.map(foreignKeyConstraint)) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java index 241c4872b1caa4..727e8629f74b2e 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/VersionedDatasetMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.dataset.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; @@ -14,11 +16,11 @@ import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FabricType; import com.linkedin.datahub.graphql.generated.VersionedDataset; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DeprecationMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -38,13 +40,10 @@ import javax.annotation.Nonnull; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - - /** * Maps GMS response objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ @Slf4j public class VersionedDatasetMapper implements ModelMapper { @@ -67,28 +66,52 @@ public VersionedDataset apply(@Nonnull final EntityResponse entityResponse) { SystemMetadata schemaSystemMetadata = getSystemMetadata(aspectMap, SCHEMA_METADATA_ASPECT_NAME); mappingHelper.mapToResult(DATASET_KEY_ASPECT_NAME, this::mapDatasetKey); - mappingHelper.mapToResult(DATASET_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(DATASET_DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); - mappingHelper.mapToResult(SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setSchema(SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); - mappingHelper.mapToResult(EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); + mappingHelper.mapToResult( + DATASET_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapDatasetProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + DATASET_DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DatasetDeprecationMapper.map(new DatasetDeprecation(dataMap)))); + mappingHelper.mapToResult( + SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setSchema( + SchemaMapper.map(new SchemaMetadata(dataMap), schemaSystemMetadata, entityUrn))); + mappingHelper.mapToResult( + EDITABLE_DATASET_PROPERTIES_ASPECT_NAME, this::mapEditableDatasetProperties); mappingHelper.mapToResult(VIEW_PROPERTIES_ASPECT_NAME, this::mapViewProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (dataset, dataMap) -> - 
dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); - mappingHelper.mapToResult(EDITABLE_SCHEMA_METADATA_ASPECT_NAME, (dataset, dataMap) -> - dataset.setEditableSchemaMetadata(EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (dataset, dataMap) -> - dataset.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (dataset, dataMap) -> this.mapGlobalTags(dataset, dataMap, entityUrn)); + mappingHelper.mapToResult( + EDITABLE_SCHEMA_METADATA_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setEditableSchemaMetadata( + EditableSchemaMetadataMapper.map(new EditableSchemaMetadata(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(CONTAINER_ASPECT_NAME, this::mapContainers); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDeprecation(DeprecationMapper.map(new 
Deprecation(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); return mappingHelper.getResult(); } @@ -104,12 +127,15 @@ private void mapDatasetKey(@Nonnull VersionedDataset dataset, @Nonnull DataMap d final DatasetKey gmsKey = new DatasetKey(dataMap); dataset.setName(gmsKey.getName()); dataset.setOrigin(FabricType.valueOf(gmsKey.getOrigin().toString())); - dataset.setPlatform(DataPlatform.builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(gmsKey.getPlatform().toString()).build()); + dataset.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(gmsKey.getPlatform().toString()) + .build()); } - private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { + private void mapDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, Urn entityUrn) { final DatasetProperties gmsProperties = new DatasetProperties(dataMap); final com.linkedin.datahub.graphql.generated.DatasetProperties properties = new com.linkedin.datahub.graphql.generated.DatasetProperties(); @@ -118,7 +144,8 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da if (gmsProperties.getExternalUrl() != null) { properties.setExternalUrl(gmsProperties.getExternalUrl().toString()); } - properties.setCustomProperties(CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); + properties.setCustomProperties( + CustomPropertiesMapper.map(gmsProperties.getCustomProperties(), entityUrn)); if (gmsProperties.getName() != null) { properties.setName(gmsProperties.getName()); } else { @@ -128,8 +155,10 @@ private void mapDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull Da dataset.setProperties(properties); } - private void mapEditableDatasetProperties(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final 
EditableDatasetProperties editableDatasetProperties = new EditableDatasetProperties(dataMap); + private void mapEditableDatasetProperties( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { + final EditableDatasetProperties editableDatasetProperties = + new EditableDatasetProperties(dataMap); final DatasetEditableProperties editableProperties = new DatasetEditableProperties(); editableProperties.setDescription(editableDatasetProperties.getDescription()); dataset.setEditableProperties(editableProperties); @@ -145,18 +174,21 @@ private void mapViewProperties(@Nonnull VersionedDataset dataset, @Nonnull DataM dataset.setViewProperties(graphqlProperties); } - private void mapGlobalTags(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlobalTags globalTags = GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); + private void mapGlobalTags( + @Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap, @Nonnull Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlobalTags globalTags = + GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn); dataset.setTags(globalTags); } private void mapContainers(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { - final com.linkedin.container.Container gmsContainer = new com.linkedin.container.Container(dataMap); - dataset.setContainer(Container - .builder() - .setType(EntityType.CONTAINER) - .setUrn(gmsContainer.getContainer().toString()) - .build()); + final com.linkedin.container.Container gmsContainer = + new com.linkedin.container.Container(dataMap); + dataset.setContainer( + Container.builder() + .setType(EntityType.CONTAINER) + .setUrn(gmsContainer.getContainer().toString()) + .build()); } private void mapDomains(@Nonnull VersionedDataset dataset, @Nonnull DataMap dataMap) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java index df8de87ff69ff1..51ef254f52225f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainAssociationMapper.java @@ -5,32 +5,32 @@ import com.linkedin.datahub.graphql.generated.EntityType; import javax.annotation.Nonnull; - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class DomainAssociationMapper { - public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); + public static final DomainAssociationMapper INSTANCE = new DomainAssociationMapper(); - public static DomainAssociation map( - @Nonnull final com.linkedin.domain.Domains domains, - @Nonnull final String entityUrn - ) { - return INSTANCE.apply(domains, entityUrn); - } + public static DomainAssociation map( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + return INSTANCE.apply(domains, entityUrn); + } - public DomainAssociation apply(@Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { - if (domains.getDomains().size() > 0) { - DomainAssociation association = new DomainAssociation(); - association.setDomain(Domain.builder() - .setType(EntityType.DOMAIN) - .setUrn(domains.getDomains().get(0).toString()).build()); - association.setAssociatedUrn(entityUrn); - return association; - } - return null; + public DomainAssociation apply( + @Nonnull final com.linkedin.domain.Domains domains, @Nonnull final String entityUrn) { + if (domains.getDomains().size() > 0) { + DomainAssociation association = new DomainAssociation(); + association.setDomain( + Domain.builder() + .setType(EntityType.DOMAIN) + .setUrn(domains.getDomains().get(0).toString()) + .build()); + association.setAssociatedUrn(entityUrn); + return association; } + return null; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java index fe52b5eff718fc..7ff1f70311b22e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainMapper.java @@ -14,7 +14,6 @@ import 
com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; - public class DomainMapper { public static Domain map(final EntityResponse entityResponse) { @@ -33,30 +32,38 @@ public static Domain map(final EntityResponse entityResponse) { return null; } - final EnvelopedAspect envelopedDomainProperties = aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); + final EnvelopedAspect envelopedDomainProperties = + aspects.get(Constants.DOMAIN_PROPERTIES_ASPECT_NAME); if (envelopedDomainProperties != null) { - result.setProperties(mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); + result.setProperties( + mapDomainProperties(new DomainProperties(envelopedDomainProperties.getValue().data()))); } final EnvelopedAspect envelopedOwnership = aspects.get(Constants.OWNERSHIP_ASPECT_NAME); if (envelopedOwnership != null) { - result.setOwnership(OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); + result.setOwnership( + OwnershipMapper.map(new Ownership(envelopedOwnership.getValue().data()), entityUrn)); } - final EnvelopedAspect envelopedInstitutionalMemory = aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); + final EnvelopedAspect envelopedInstitutionalMemory = + aspects.get(Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); if (envelopedInstitutionalMemory != null) { - result.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); + result.setInstitutionalMemory( + InstitutionalMemoryMapper.map( + new InstitutionalMemory(envelopedInstitutionalMemory.getValue().data()), entityUrn)); } return result; } - private static com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties(final DomainProperties gmsProperties) { - final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = new com.linkedin.datahub.graphql.generated.DomainProperties(); + private static 
com.linkedin.datahub.graphql.generated.DomainProperties mapDomainProperties( + final DomainProperties gmsProperties) { + final com.linkedin.datahub.graphql.generated.DomainProperties propertiesResult = + new com.linkedin.datahub.graphql.generated.DomainProperties(); propertiesResult.setName(gmsProperties.getName()); propertiesResult.setDescription(gmsProperties.getDescription()); return propertiesResult; } - private DomainMapper() { } + private DomainMapper() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java index 4879c339d99faa..06d5df9354380d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/domain/DomainType.java @@ -17,8 +17,6 @@ import com.linkedin.metadata.query.AutoCompleteResult; import com.linkedin.metadata.query.filter.Filter; import graphql.execution.DataFetcherResult; -import org.apache.commons.lang3.NotImplementedException; - import java.net.URISyntaxException; import java.util.ArrayList; import java.util.HashSet; @@ -29,19 +27,21 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; import javax.annotation.Nullable; +import org.apache.commons.lang3.NotImplementedException; +public class DomainType + implements SearchableEntityType, + com.linkedin.datahub.graphql.types.EntityType { -public class DomainType implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - Constants.OWNERSHIP_ASPECT_NAME, - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + 
Constants.OWNERSHIP_ASPECT_NAME, + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME); private final EntityClient _entityClient; - public DomainType(final EntityClient entityClient) { + public DomainType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -61,28 +61,30 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List domainUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List domainUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.DOMAIN_ENTITY_NAME, - new HashSet<>(domainUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + _entityClient.batchGetV2( + Constants.DOMAIN_ENTITY_NAME, + new HashSet<>(domainUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : domainUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(DomainMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(DomainMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Domains", e); @@ -90,25 +92,31 @@ public List> batchLoad(@Nonnull List urns, @No } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { - throw new NotImplementedException("Searchable type (deprecated) not implemented on Domain entity type"); + @Nonnull final QueryContext context) + throws Exception { + throw new NotImplementedException( + "Searchable type (deprecated) not implemented on Domain entity type"); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.DOMAIN_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } - private Urn getUrn(final String urnStr) { try { return Urn.createFromString(urnStr); @@ -116,4 +124,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java index f2c9e962811b90..9a27a1fba853fe 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryNodeType.java @@ -1,17 +1,21 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; +import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Entity; -import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.GlossaryNode; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryNodeMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; - import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -20,18 +24,12 @@ import java.util.function.Function; import java.util.stream.Collectors; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_KEY_ASPECT_NAME; -import static com.linkedin.metadata.Constants.OWNERSHIP_ASPECT_NAME; - -public class GlossaryNodeType implements com.linkedin.datahub.graphql.types.EntityType { +public class GlossaryNodeType + implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_NODE_KEY_ASPECT_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, - 
OWNERSHIP_ASPECT_NAME - ); + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_NODE_KEY_ASPECT_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, OWNERSHIP_ASPECT_NAME); private final EntityClient _entityClient; @@ -55,25 +53,31 @@ public Function getKeyProvider() { } @Override - public List> batchLoad(final List urns, final QueryContext context) { - final List glossaryNodeUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + final List urns, final QueryContext context) { + final List glossaryNodeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map glossaryNodeMap = _entityClient.batchGetV2(GLOSSARY_NODE_ENTITY_NAME, - new HashSet<>(glossaryNodeUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map glossaryNodeMap = + _entityClient.batchGetV2( + GLOSSARY_NODE_ENTITY_NAME, + new HashSet<>(glossaryNodeUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : glossaryNodeUrns) { gmsResults.add(glossaryNodeMap.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsGlossaryNode -> - gmsGlossaryNode == null ? null - : DataFetcherResult.newResult() - .data(GlossaryNodeMapper.map(gmsGlossaryNode)) - .build()) + .map( + gmsGlossaryNode -> + gmsGlossaryNode == null + ? 
null + : DataFetcherResult.newResult() + .data(GlossaryNodeMapper.map(gmsGlossaryNode)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load GlossaryNodes", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java index 3574c17a50923f..c40740238f61e1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.glossary; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.ArrayList; @@ -39,118 +42,135 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class GlossaryTermType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of(""); - - private static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - GLOSSARY_TERM_KEY_ASPECT_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, - GLOSSARY_RELATED_TERM_ASPECT_NAME, - 
INSTITUTIONAL_MEMORY_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - BROWSE_PATHS_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - DEPRECATION_ASPECT_NAME - ); - - private final EntityClient _entityClient; - - public GlossaryTermType(final EntityClient entityClient) { - _entityClient = entityClient; +public class GlossaryTermType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of(""); + + private static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + GLOSSARY_TERM_KEY_ASPECT_NAME, + GLOSSARY_TERM_INFO_ASPECT_NAME, + GLOSSARY_RELATED_TERM_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + BROWSE_PATHS_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + DEPRECATION_ASPECT_NAME); + + private final EntityClient _entityClient; + + public GlossaryTermType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return GlossaryTerm.class; + } + + @Override + public EntityType type() { + return EntityType.GLOSSARY_TERM; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + final List glossaryTermUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map glossaryTermMap = + _entityClient.batchGetV2( + GLOSSARY_TERM_ENTITY_NAME, + new HashSet<>(glossaryTermUrns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : glossaryTermUrns) { + gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsGlossaryTerm -> + gmsGlossaryTerm == null + ? 
null + : DataFetcherResult.newResult() + .data(GlossaryTermMapper.map(gmsGlossaryTerm)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load GlossaryTerms", e); } - - @Override - public Class objectClass() { - return GlossaryTerm.class; - } - - @Override - public EntityType type() { - return EntityType.GLOSSARY_TERM; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - final List glossaryTermUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map glossaryTermMap = _entityClient.batchGetV2(GLOSSARY_TERM_ENTITY_NAME, - new HashSet<>(glossaryTermUrns), ASPECTS_TO_RESOLVE, context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : glossaryTermUrns) { - gmsResults.add(glossaryTermMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsGlossaryTerm -> - gmsGlossaryTerm == null ? 
null - : DataFetcherResult.newResult() - .data(GlossaryTermMapper.map(gmsGlossaryTerm)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load GlossaryTerms", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search( - "glossaryTerm", query, facetFilters, start, count, context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete( + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "glossaryTerm", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( "glossaryTerm", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List 
filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "glossaryTerm", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); - } - + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "glossaryTerm", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + GlossaryTermUtils.getGlossaryTermUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java index 93b6ab53d5a3ac..59f7cc8a9c8284 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/GlossaryTermUtils.java @@ -1,27 +1,27 @@ package com.linkedin.datahub.graphql.types.glossary; import com.linkedin.common.urn.GlossaryTermUrn; - import java.net.URISyntaxException; import java.util.regex.Pattern; public class GlossaryTermUtils { - private GlossaryTermUtils() { } + private GlossaryTermUtils() {} - static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { - try { - return GlossaryTermUrn.createFromString(urnStr); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); - } + static GlossaryTermUrn getGlossaryTermUrn(String urnStr) { + try { + return GlossaryTermUrn.createFromString(urnStr); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format("Failed to retrieve glossary with urn %s, invalid urn", urnStr)); } + } - public static String getGlossaryTermName(String hierarchicalName) { - if (hierarchicalName.contains(".")) { - String[] nodes = 
hierarchicalName.split(Pattern.quote(".")); - return nodes[nodes.length - 1]; - } - return hierarchicalName; + public static String getGlossaryTermName(String hierarchicalName) { + if (hierarchicalName.contains(".")) { + String[] nodes = hierarchicalName.split(Pattern.quote(".")); + return nodes[nodes.length - 1]; } + return hierarchicalName; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java index 6a1d849dd23bf5..901361eb0b2be6 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryNodeMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -13,11 +15,8 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.key.GlossaryNodeKey; - import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class GlossaryNodeMapper implements ModelMapper { public static final GlossaryNodeMapper INSTANCE = new GlossaryNodeMapper(); @@ -35,11 +34,14 @@ public GlossaryNode apply(@Nonnull final EntityResponse entityResponse) { EnvelopedAspectMap aspectMap = entityResponse.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_NODE_INFO_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); + mappingHelper.mapToResult( + GLOSSARY_NODE_INFO_ASPECT_NAME, + (glossaryNode, dataMap) -> 
glossaryNode.setProperties(mapGlossaryNodeProperties(dataMap))); mappingHelper.mapToResult(GLOSSARY_NODE_KEY_ASPECT_NAME, this::mapGlossaryNodeKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryNode, dataMap) -> - glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryNode, dataMap) -> + glossaryNode.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java index 2f99700bc30a14..12ba8c1e088f09 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermInfoMapper.java @@ -1,41 +1,44 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.GlossaryTermInfo; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermInfoMapper { - public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); + public static final GlossaryTermInfoMapper INSTANCE = new GlossaryTermInfoMapper(); - public static GlossaryTermInfo map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - return INSTANCE.apply(glossaryTermInfo, entityUrn); - } + public static GlossaryTermInfo map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + return INSTANCE.apply(glossaryTermInfo, entityUrn); + } - public GlossaryTermInfo apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); - glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); - glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); - if (glossaryTermInfo.hasName()) { - glossaryTermInfoResult.setName(glossaryTermInfo.getName()); - } - if (glossaryTermInfo.hasSourceRef()) { - glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); - } - if (glossaryTermInfo.hasSourceUrl()) { - glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); - } - if (glossaryTermInfo.hasCustomProperties()) { - glossaryTermInfoResult.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); - } - return glossaryTermInfoResult; + public GlossaryTermInfo apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermInfo glossaryTermInfoResult = + new com.linkedin.datahub.graphql.generated.GlossaryTermInfo(); + 
glossaryTermInfoResult.setDefinition(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setDescription(glossaryTermInfo.getDefinition()); + glossaryTermInfoResult.setTermSource(glossaryTermInfo.getTermSource()); + if (glossaryTermInfo.hasName()) { + glossaryTermInfoResult.setName(glossaryTermInfo.getName()); + } + if (glossaryTermInfo.hasSourceRef()) { + glossaryTermInfoResult.setSourceRef(glossaryTermInfo.getSourceRef()); + } + if (glossaryTermInfo.hasSourceUrl()) { + glossaryTermInfoResult.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); + } + if (glossaryTermInfo.hasCustomProperties()) { + glossaryTermInfoResult.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } + return glossaryTermInfoResult; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java index c98177b458dea3..a02f79535399f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Deprecation; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.Ownership; @@ -15,71 +17,82 @@ import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.glossary.GlossaryTermInfo; import 
com.linkedin.metadata.key.GlossaryTermKey; -import com.linkedin.domain.Domains; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermMapper implements ModelMapper { - public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); + public static final GlossaryTermMapper INSTANCE = new GlossaryTermMapper(); - public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static GlossaryTerm map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { - GlossaryTerm result = new GlossaryTerm(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public GlossaryTerm apply(@Nonnull final EntityResponse entityResponse) { + GlossaryTerm result = new GlossaryTerm(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.GLOSSARY_TERM); - final String legacyName = GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.GLOSSARY_TERM); + final String legacyName = + GlossaryTermUtils.getGlossaryTermName(entityResponse.getUrn().getId()); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setGlossaryTermInfo(GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(GLOSSARY_TERM_INFO_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setProperties(GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (glossaryTerm, dataMap) -> - 
glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (glossaryTerm, dataMap) -> - glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (dataset, dataMap) -> - dataset.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(GLOSSARY_TERM_KEY_ASPECT_NAME, this::mapGlossaryTermKey); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setGlossaryTermInfo( + GlossaryTermInfoMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERM_INFO_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setProperties( + GlossaryTermPropertiesMapper.map(new GlossaryTermInfo(dataMap), entityUrn))); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (glossaryTerm, dataMap) -> + glossaryTerm.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - // If there's no name property, resort to the legacy name computation. 
- if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { - result.getGlossaryTermInfo().setName(legacyName); - } - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } - return mappingHelper.getResult(); + // If there's no name property, resort to the legacy name computation. + if (result.getGlossaryTermInfo() != null && result.getGlossaryTermInfo().getName() == null) { + result.getGlossaryTermInfo().setName(legacyName); } - - private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); - glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); - glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } + return mappingHelper.getResult(); + } - private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); - } + private void mapGlossaryTermKey(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + GlossaryTermKey glossaryTermKey = new GlossaryTermKey(dataMap); + glossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(glossaryTermKey.getName())); + glossaryTerm.setHierarchicalName(glossaryTermKey.getName()); + } + + private void mapDomains(@Nonnull GlossaryTerm glossaryTerm, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + glossaryTerm.setDomain(DomainAssociationMapper.map(domains, glossaryTerm.getUrn())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java index 6b358331833937..94edfcbd314552 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermPropertiesMapper.java @@ -2,25 +2,27 @@ import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.GlossaryTermProperties; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermPropertiesMapper { public static final GlossaryTermPropertiesMapper INSTANCE = new GlossaryTermPropertiesMapper(); - public static GlossaryTermProperties map(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + public static GlossaryTermProperties map( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { return INSTANCE.apply(glossaryTermInfo, entityUrn); } - public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); + public GlossaryTermProperties apply( + @Nonnull final com.linkedin.glossary.GlossaryTermInfo glossaryTermInfo, Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTermProperties result = + new com.linkedin.datahub.graphql.generated.GlossaryTermProperties(); result.setDefinition(glossaryTermInfo.getDefinition()); result.setDescription(glossaryTermInfo.getDefinition()); result.setTermSource(glossaryTermInfo.getTermSource()); @@ -34,7 +36,8 @@ public GlossaryTermProperties apply(@Nonnull final com.linkedin.glossary.Glossar result.setSourceUrl(glossaryTermInfo.getSourceUrl().toString()); } if (glossaryTermInfo.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); + result.setCustomProperties( + CustomPropertiesMapper.map(glossaryTermInfo.getCustomProperties(), entityUrn)); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java index a64b0f7dc64fbe..8494eace222448 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/glossary/mappers/GlossaryTermsMapper.java @@ -1,51 +1,52 @@ package com.linkedin.datahub.graphql.types.glossary.mappers; +import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.common.urn.Urn; -import javax.annotation.Nonnull; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.datahub.graphql.generated.GlossaryTerms; -import com.linkedin.common.GlossaryTermAssociation; import com.linkedin.datahub.graphql.generated.GlossaryTerm; +import com.linkedin.datahub.graphql.generated.GlossaryTerms; import com.linkedin.datahub.graphql.types.glossary.GlossaryTermUtils; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class GlossaryTermsMapper { - public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); - - public static GlossaryTerms map( - @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(glossaryTerms, entityUrn); - } - - public GlossaryTerms apply(@Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.GlossaryTerms result = new com.linkedin.datahub.graphql.generated.GlossaryTerms(); - result.setTerms(glossaryTerms.getTerms().stream().map( - association -> this.mapGlossaryTermAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } - - private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( - @Nonnull final GlossaryTermAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); - final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); - resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); - resultGlossaryTerm.setUrn(input.getUrn().toString()); - resultGlossaryTerm.setName(GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); - result.setTerm(resultGlossaryTerm); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } - + public static final GlossaryTermsMapper INSTANCE = new GlossaryTermsMapper(); + + public static GlossaryTerms map( + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + return INSTANCE.apply(glossaryTerms, entityUrn); + } + + public GlossaryTerms apply( + @Nonnull final com.linkedin.common.GlossaryTerms glossaryTerms, + @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.GlossaryTerms result = + 
new com.linkedin.datahub.graphql.generated.GlossaryTerms(); + result.setTerms( + glossaryTerms.getTerms().stream() + .map(association -> this.mapGlossaryTermAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private com.linkedin.datahub.graphql.generated.GlossaryTermAssociation mapGlossaryTermAssociation( + @Nonnull final GlossaryTermAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlossaryTermAssociation result = + new com.linkedin.datahub.graphql.generated.GlossaryTermAssociation(); + final GlossaryTerm resultGlossaryTerm = new GlossaryTerm(); + resultGlossaryTerm.setType(EntityType.GLOSSARY_TERM); + resultGlossaryTerm.setUrn(input.getUrn().toString()); + resultGlossaryTerm.setName( + GlossaryTermUtils.getGlossaryTermName(input.getUrn().getNameEntity())); + result.setTerm(resultGlossaryTerm); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java index d575a81f4ae038..621fcf5f041403 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/AutoCompleteResultsMapper.java @@ -3,26 +3,27 @@ import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.query.AutoCompleteResult; - import java.util.stream.Collectors; import javax.annotation.Nonnull; +public class AutoCompleteResultsMapper + implements ModelMapper { -public class AutoCompleteResultsMapper implements ModelMapper { - - public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); + 
public static final AutoCompleteResultsMapper INSTANCE = new AutoCompleteResultsMapper(); - public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { - return INSTANCE.apply(results); - } + public static AutoCompleteResults map(@Nonnull final AutoCompleteResult results) { + return INSTANCE.apply(results); + } - @Override - public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { - final AutoCompleteResults result = new AutoCompleteResults(); - result.setQuery(input.getQuery()); - result.setSuggestions(input.getSuggestions()); - result.setEntities(input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect( - Collectors.toList())); - return result; - } + @Override + public AutoCompleteResults apply(@Nonnull final AutoCompleteResult input) { + final AutoCompleteResults result = new AutoCompleteResults(); + result.setQuery(input.getQuery()); + result.setSuggestions(input.getSuggestions()); + result.setEntities( + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java index ea44c4409b7097..689ff82147e15f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathMapper.java @@ -2,27 +2,27 @@ import com.linkedin.datahub.graphql.Constants; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.Arrays; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class BrowsePathMapper implements ModelMapper { - public static final BrowsePathMapper INSTANCE = new 
BrowsePathMapper(); + public static final BrowsePathMapper INSTANCE = new BrowsePathMapper(); - public static BrowsePath map(@Nonnull final String input) { - return INSTANCE.apply(input); - } + public static BrowsePath map(@Nonnull final String input) { + return INSTANCE.apply(input); + } - @Override - public BrowsePath apply(@Nonnull final String input) { - final BrowsePath browsePath = new BrowsePath(); - final List path = Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) - .filter(pathComponent -> !"".equals(pathComponent)) - .collect(Collectors.toList()); - browsePath.setPath(path); - return browsePath; - } + @Override + public BrowsePath apply(@Nonnull final String input) { + final BrowsePath browsePath = new BrowsePath(); + final List path = + Arrays.stream(input.split(Constants.BROWSE_PATH_DELIMITER)) + .filter(pathComponent -> !"".equals(pathComponent)) + .collect(Collectors.toList()); + browsePath.setPath(path); + return browsePath; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java index 4dac4468a80d5e..ae70823d675d8a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowsePathsMapper.java @@ -1,25 +1,24 @@ package com.linkedin.datahub.graphql.types.mappers; import com.linkedin.datahub.graphql.generated.BrowsePath; - -import javax.annotation.Nonnull; import java.util.ArrayList; import java.util.List; +import javax.annotation.Nonnull; public class BrowsePathsMapper implements ModelMapper, List> { - public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); + public static final BrowsePathsMapper INSTANCE = new BrowsePathsMapper(); - public static List map(@Nonnull final List input) { - return INSTANCE.apply(input); - 
} + public static List map(@Nonnull final List input) { + return INSTANCE.apply(input); + } - @Override - public List apply(@Nonnull final List input) { - List results = new ArrayList<>(); - for (String pathStr : input) { - results.add(BrowsePathMapper.map(pathStr)); - } - return results; + @Override + public List apply(@Nonnull final List input) { + List results = new ArrayList<>(); + for (String pathStr : input) { + results.add(BrowsePathMapper.map(pathStr)); } + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java index c3e74c28fe59dc..5cac03b19a74c1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/BrowseResultMapper.java @@ -9,10 +9,8 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class BrowseResultMapper { - private BrowseResultMapper() { - } + private BrowseResultMapper() {} public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) { final BrowseResults result = new BrowseResults(); @@ -31,7 +29,9 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) result.setMetadata(browseResultMetadata); List entities = - input.getEntities().stream().map(entity -> UrnToEntityMapper.map(entity.getUrn())).collect(Collectors.toList()); + input.getEntities().stream() + .map(entity -> UrnToEntityMapper.map(entity.getUrn())) + .collect(Collectors.toList()); result.setEntities(entities); List groups = @@ -41,7 +41,8 @@ public static BrowseResults map(com.linkedin.metadata.browse.BrowseResult input) return result; } - private static BrowseResultGroup mapGroup(@Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { + private static BrowseResultGroup 
mapGroup( + @Nonnull final com.linkedin.metadata.browse.BrowseResultGroup group) { final BrowseResultGroup result = new BrowseResultGroup(); result.setName(group.getName()); result.setCount(group.getCount()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java index e6172debb439e2..c58341f994d4f3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/InputModelMapper.java @@ -1,8 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Maps an input of type I to an output of type O with actor context. - */ +/** Maps an input of type I to an output of type O with actor context. */ public interface InputModelMapper { - O apply(final I input, final A actor); -} \ No newline at end of file + O apply(final I input, final A actor); +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java index 2a615b24eaac20..7c7dab2e024720 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/MapperUtils.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; +import static com.linkedin.metadata.utils.SearchUtil.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.AggregationMetadata; import com.linkedin.datahub.graphql.generated.FacetMetadata; @@ -10,75 +13,87 @@ import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import 
com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.utils.SearchUtils; -import lombok.extern.slf4j.Slf4j; - import java.net.URISyntaxException; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; -import static com.linkedin.metadata.utils.SearchUtil.*; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class MapperUtils { - private MapperUtils() { - - } + private MapperUtils() {} public static SearchResult mapResult(SearchEntity searchEntity) { - return new SearchResult(UrnToEntityMapper.map(searchEntity.getEntity()), + return new SearchResult( + UrnToEntityMapper.map(searchEntity.getEntity()), getInsightsFromFeatures(searchEntity.getFeatures()), getMatchedFieldEntry(searchEntity.getMatchedFields())); } - public static FacetMetadata mapFacet(com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { + public static FacetMetadata mapFacet( + com.linkedin.metadata.search.AggregationMetadata aggregationMetadata) { final FacetMetadata facetMetadata = new FacetMetadata(); - List aggregationFacets = List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); - List isEntityTypeFilter = aggregationFacets.stream().map( - facet -> facet.equals("entity") || facet.contains("_entityType")).collect(Collectors.toList()); + List aggregationFacets = + List.of(aggregationMetadata.getName().split(AGGREGATION_SEPARATOR_CHAR)); + List isEntityTypeFilter = + aggregationFacets.stream() + .map(facet -> facet.equals("entity") || facet.contains("_entityType")) + .collect(Collectors.toList()); facetMetadata.setField(aggregationMetadata.getName()); facetMetadata.setDisplayName( - Optional.ofNullable(aggregationMetadata.getDisplayName()).orElse(aggregationMetadata.getName())); - facetMetadata.setAggregations(aggregationMetadata.getFilterValues() - .stream() - .map(filterValue -> new 
AggregationMetadata(convertFilterValue(filterValue.getValue(), isEntityTypeFilter), - filterValue.getFacetCount(), - filterValue.getEntity() == null ? null : UrnToEntityMapper.map(filterValue.getEntity()))) - .collect(Collectors.toList())); + Optional.ofNullable(aggregationMetadata.getDisplayName()) + .orElse(aggregationMetadata.getName())); + facetMetadata.setAggregations( + aggregationMetadata.getFilterValues().stream() + .map( + filterValue -> + new AggregationMetadata( + convertFilterValue(filterValue.getValue(), isEntityTypeFilter), + filterValue.getFacetCount(), + filterValue.getEntity() == null + ? null + : UrnToEntityMapper.map(filterValue.getEntity()))) + .collect(Collectors.toList())); return facetMetadata; } public static String convertFilterValue(String filterValue, List isEntityTypeFilter) { String[] aggregations = filterValue.split(AGGREGATION_SEPARATOR_CHAR); - return IntStream.range(0, aggregations.length).mapToObj( - idx -> idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) ? EntityTypeMapper.getType(aggregations[idx]).toString() : aggregations[idx]) + return IntStream.range(0, aggregations.length) + .mapToObj( + idx -> + idx < isEntityTypeFilter.size() && isEntityTypeFilter.get(idx) + ? 
EntityTypeMapper.getType(aggregations[idx]).toString() + : aggregations[idx]) .collect(Collectors.joining(AGGREGATION_SEPARATOR_CHAR)); } - public static List getMatchedFieldEntry(List highlightMetadata) { + public static List getMatchedFieldEntry( + List highlightMetadata) { return highlightMetadata.stream() - .map(field -> { - MatchedField matchedField = new MatchedField(); - matchedField.setName(field.getName()); - matchedField.setValue(field.getValue()); - if (SearchUtils.isUrn(field.getValue())) { - try { + .map( + field -> { + MatchedField matchedField = new MatchedField(); + matchedField.setName(field.getName()); + matchedField.setValue(field.getValue()); + if (SearchUtils.isUrn(field.getValue())) { + try { Urn urn = Urn.createFromString(field.getValue()); matchedField.setEntity(UrnToEntityMapper.map(urn)); - } catch (URISyntaxException e) { + } catch (URISyntaxException e) { log.debug("Failed to create urn from MatchedField value: {}", field.getValue()); + } } - } - return matchedField; - }) + return matchedField; + }) .collect(Collectors.toList()); } - public static SearchSuggestion mapSearchSuggestion(com.linkedin.metadata.search.SearchSuggestion suggestion) { - return new SearchSuggestion(suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); + public static SearchSuggestion mapSearchSuggestion( + com.linkedin.metadata.search.SearchSuggestion suggestion) { + return new SearchSuggestion( + suggestion.getText(), suggestion.getScore(), Math.toIntExact(suggestion.getFrequency())); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java index 08afbd510b98f3..2167be9f27ca8f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/ModelMapper.java @@ -1,9 +1,6 @@ package com.linkedin.datahub.graphql.types.mappers; -/** - * Simple interface for classes capable of mapping an input of type I to - * an output of type O. - */ +/** Simple interface for classes capable of mapping an input of type I to an output of type O. */ public interface ModelMapper { - O apply(final I input); + O apply(final I input); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java index 903e9625247346..e0ac0336c8715d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/TimeSeriesAspectMapper.java @@ -3,7 +3,5 @@ import com.linkedin.datahub.graphql.generated.TimeSeriesAspect; import com.linkedin.metadata.aspect.EnvelopedAspect; - -public interface TimeSeriesAspectMapper extends ModelMapper { - -} +public interface TimeSeriesAspectMapper + extends ModelMapper {} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java index dd00727fc2845a..baf632ae8bdf4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollAcrossLineageResultsMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import 
com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Entity; @@ -12,10 +15,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - - public class UrnScrollAcrossLineageResultsMapper { public static ScrollAcrossLineageResults map( LineageScrollResult searchResult) { @@ -30,8 +29,12 @@ public ScrollAcrossLineageResults apply(LineageScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java index fd774d73f3df78..72eb71cd095bb2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnScrollResultsMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - public class UrnScrollResultsMapper { public static ScrollResults map( com.linkedin.metadata.search.ScrollResult scrollResult) { @@ -25,8 +24,12 @@ public ScrollResults 
apply(com.linkedin.metadata.search.ScrollResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java index ae87d0269c188a..642fe90cf2aedf 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchAcrossLineageResultsMapper.java @@ -1,23 +1,22 @@ package com.linkedin.datahub.graphql.types.mappers; +import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; +import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; + import com.linkedin.common.UrnArray; import com.linkedin.data.template.RecordTemplate; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityPath; import com.linkedin.datahub.graphql.generated.FreshnessStats; -import com.linkedin.datahub.graphql.generated.SystemFreshness; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResult; import com.linkedin.datahub.graphql.generated.SearchAcrossLineageResults; +import com.linkedin.datahub.graphql.generated.SystemFreshness; import 
com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.metadata.search.LineageSearchEntity; import com.linkedin.metadata.search.LineageSearchResult; import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; -import static com.linkedin.datahub.graphql.types.mappers.MapperUtils.*; -import static com.linkedin.datahub.graphql.util.SearchInsightsUtil.*; - - public class UrnSearchAcrossLineageResultsMapper { public static SearchAcrossLineageResults map( LineageSearchResult searchResult) { @@ -32,17 +31,25 @@ public SearchAcrossLineageResults apply(LineageSearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(this::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); if (input.hasFreshness()) { FreshnessStats outputFreshness = new FreshnessStats(); outputFreshness.setCached(input.getFreshness().isCached()); - outputFreshness.setSystemFreshness(input.getFreshness().getSystemFreshness().entrySet().stream().map(x -> - SystemFreshness.builder() - .setSystemName(x.getKey()) - .setFreshnessMillis(x.getValue()) - .build()).collect(Collectors.toList())); + outputFreshness.setSystemFreshness( + input.getFreshness().getSystemFreshness().entrySet().stream() + .map( + x -> + SystemFreshness.builder() + .setSystemName(x.getKey()) + .setFreshnessMillis(x.getValue()) + .build()) + .collect(Collectors.toList())); result.setFreshness(outputFreshness); } return result; diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java index b16e2f10d1df7a..d814c44e469bcc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mappers/UrnSearchResultsMapper.java @@ -6,7 +6,6 @@ import com.linkedin.metadata.search.SearchResultMetadata; import java.util.stream.Collectors; - public class UrnSearchResultsMapper { public static SearchResults map( com.linkedin.metadata.search.SearchResult searchResult) { @@ -25,9 +24,16 @@ public SearchResults apply(com.linkedin.metadata.search.SearchResult input) { result.setTotal(input.getNumEntities()); final SearchResultMetadata searchResultMetadata = input.getMetadata(); - result.setSearchResults(input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); - result.setFacets(searchResultMetadata.getAggregations().stream().map(MapperUtils::mapFacet).collect(Collectors.toList())); - result.setSuggestions(searchResultMetadata.getSuggestions().stream().map(MapperUtils::mapSearchSuggestion).collect(Collectors.toList())); + result.setSearchResults( + input.getEntities().stream().map(MapperUtils::mapResult).collect(Collectors.toList())); + result.setFacets( + searchResultMetadata.getAggregations().stream() + .map(MapperUtils::mapFacet) + .collect(Collectors.toList())); + result.setSuggestions( + searchResultMetadata.getSuggestions().stream() + .map(MapperUtils::mapSearchSuggestion) + .collect(Collectors.toList())); return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java index f5594afc1a5b5d..da3ddd1115437f 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureTableType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,103 +41,122 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLFeatureTableType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of("platform", "name"); - private final EntityClient _entityClient; - - public MLFeatureTableType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLFEATURE_TABLE; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return MLFeatureTable.class; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) throws Exception { - final List mlFeatureTableUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map mlFeatureTableMap = 
_entityClient.batchGetV2(ML_FEATURE_TABLE_ENTITY_NAME, - new HashSet<>(mlFeatureTableUrns), null, context.getAuthentication()); - - final List gmsResults = mlFeatureTableUrns.stream() - .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlFeatureTable -> gmsMlFeatureTable == null ? null - : DataFetcherResult.newResult() - .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLFeatureTables", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlFeatureTable", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlFeatureTable", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlFeatureTable", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLFeatureTableType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of("platform", "name"); + private final EntityClient _entityClient; + + public MLFeatureTableType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLFEATURE_TABLE; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return MLFeatureTable.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) throws Exception { + final List mlFeatureTableUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map mlFeatureTableMap = + _entityClient.batchGetV2( + ML_FEATURE_TABLE_ENTITY_NAME, + new HashSet<>(mlFeatureTableUrns), + null, + context.getAuthentication()); + + final List gmsResults = + mlFeatureTableUrns.stream() + .map(featureTableUrn -> mlFeatureTableMap.getOrDefault(featureTableUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlFeatureTable -> + gmsMlFeatureTable == null + ? 
null + : DataFetcherResult.newResult() + .data(MLFeatureTableMapper.map(gmsMlFeatureTable)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLFeatureTables", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlFeatureTable", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlFeatureTable", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "mlFeatureTable", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(MLModelUtils.getUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java index f5e0d80948bcc1..6f94ea44cd476e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLFeatureType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,8 +20,8 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +33,94 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLFeatureType implements SearchableEntityType { - private static final Set FACET_FIELDS = ImmutableSet.of(""); - private final EntityClient _entityClient; + private 
static final Set FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLFeatureType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLFeatureType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLFEATURE; - } + @Override + public EntityType type() { + return EntityType.MLFEATURE; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return MLFeature.class; - } + @Override + public Class objectClass() { + return MLFeature.class; + } - @Override - public List> batchLoad(final List urns, @Nonnull final QueryContext context) - throws Exception { - final List mlFeatureUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlFeatureUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map mlFeatureMap = _entityClient.batchGetV2(ML_FEATURE_ENTITY_NAME, - new HashSet<>(mlFeatureUrns), null, context.getAuthentication()); + try { + final Map mlFeatureMap = + _entityClient.batchGetV2( + ML_FEATURE_ENTITY_NAME, + new HashSet<>(mlFeatureUrns), + null, + context.getAuthentication()); - final List gmsResults = mlFeatureUrns.stream() - .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null)) - .collect(Collectors.toList()); + final List gmsResults = + mlFeatureUrns.stream() + .map(featureUrn -> mlFeatureMap.getOrDefault(featureUrn, null)) + .collect(Collectors.toList()); - return gmsResults.stream() - .map(gmsMlFeature -> gmsMlFeature == null ? 
null - : DataFetcherResult.newResult() - .data(MLFeatureMapper.map(gmsMlFeature)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLFeatures", e); - } + return gmsResults.stream() + .map( + gmsMlFeature -> + gmsMlFeature == null + ? null + : DataFetcherResult.newResult() + .data(MLFeatureMapper.map(gmsMlFeature)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLFeatures", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlFeature", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlFeature", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + 
@Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("mlFeature", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java index 05b70c15bafc6d..d505b70effdd4c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelGroupType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,104 +41,123 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLModelGroupType implements SearchableEntityType, - BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private final EntityClient _entityClient; - - public MLModelGroupType(final EntityClient entityClient) 
{ - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLMODEL_GROUP; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return MLModelGroup.class; - } - - @Override - public List> batchLoad(final List urns, @Nonnull final QueryContext context) - throws Exception { - final List mlModelGroupUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map mlModelMap = _entityClient.batchGetV2(ML_MODEL_GROUP_ENTITY_NAME, - new HashSet<>(mlModelGroupUrns), null, context.getAuthentication()); - - final List gmsResults = mlModelGroupUrns.stream() - .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlModelGroup -> gmsMlModelGroup == null ? null - : DataFetcherResult.newResult() - .data(MLModelGroupMapper.map(gmsMlModelGroup)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLModelGroups", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlModelGroup", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlModelGroup", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); 
- } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlModelGroup", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLModelGroupType + implements SearchableEntityType, + BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private final EntityClient _entityClient; + + public MLModelGroupType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLMODEL_GROUP; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return MLModelGroup.class; + } + + @Override + public List> batchLoad( + final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlModelGroupUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map mlModelMap = + _entityClient.batchGetV2( + ML_MODEL_GROUP_ENTITY_NAME, + new HashSet<>(mlModelGroupUrns), + null, + context.getAuthentication()); + + final List gmsResults = + mlModelGroupUrns.stream() + .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlModelGroup -> + 
gmsMlModelGroup == null + ? null + : DataFetcherResult.newResult() + .data(MLModelGroupMapper.map(gmsMlModelGroup)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLModelGroups", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlModelGroup", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlModelGroup", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "mlModelGroup", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + MLModelUtils.getMLModelGroupUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java index ef4be247a246b6..27b791d78e78ea 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelType.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -25,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -38,102 +41,116 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - - -public class MLModelType implements 
SearchableEntityType, BrowsableEntityType { - - private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); - private final EntityClient _entityClient; - - public MLModelType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public EntityType type() { - return EntityType.MLMODEL; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class objectClass() { - return MLModel.class; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) throws Exception { - final List mlModelUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map mlModelMap = _entityClient.batchGetV2(ML_MODEL_ENTITY_NAME, - new HashSet<>(mlModelUrns), null, context.getAuthentication()); - - final List gmsResults = mlModelUrns.stream() - .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) - .collect(Collectors.toList()); - - return gmsResults.stream() - .map(gmsMlModel -> gmsMlModel == null ? 
null - : DataFetcherResult.newResult() - .data(MLModelMapper.map(gmsMlModel)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLModels", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlModel", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } - - @Override - public BrowseResults browse(@Nonnull List path, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final String pathStr = path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse( - "mlModel", - pathStr, - facetFilters, - start, - count, - context.getAuthentication()); - return BrowseResultMapper.map(result); - } - - @Override - public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); - return BrowsePathsMapper.map(result); +public class MLModelType + implements SearchableEntityType, BrowsableEntityType { + + private static final Set FACET_FIELDS = ImmutableSet.of("origin", "platform"); + private final EntityClient _entityClient; + + public MLModelType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public EntityType type() { + return EntityType.MLMODEL; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class objectClass() { + return MLModel.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) throws Exception { + final List mlModelUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map mlModelMap = + _entityClient.batchGetV2( + ML_MODEL_ENTITY_NAME, new HashSet<>(mlModelUrns), null, context.getAuthentication()); + + final List gmsResults = + mlModelUrns.stream() + .map(modelUrn -> mlModelMap.getOrDefault(modelUrn, null)) + .collect(Collectors.toList()); + + return gmsResults.stream() + .map( + gmsMlModel -> + gmsMlModel == null + ? 
null + : DataFetcherResult.newResult() + .data(MLModelMapper.map(gmsMlModel)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLModels", e); } + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlModel", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("mlModel", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + "mlModel", pathStr, facetFilters, start, count, context.getAuthentication()); + return BrowseResultMapper.map(result); + } + + @Override + public List browsePaths(@Nonnull String urn, @Nonnull final QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths(MLModelUtils.getMLModelUrn(urn), context.getAuthentication()); + return BrowsePathsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java index ff51bab6c114e3..ccecb0ae6406fb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLModelUtils.java @@ -1,44 +1,47 @@ package com.linkedin.datahub.graphql.types.mlmodel; -import java.net.URISyntaxException; - import com.linkedin.common.urn.MLFeatureUrn; import com.linkedin.common.urn.MLModelUrn; import com.linkedin.common.urn.Urn; +import java.net.URISyntaxException; public class MLModelUtils { - private MLModelUtils() { } + private MLModelUtils() {} - static MLModelUrn getMLModelUrn(String modelUrn) { - try { - return MLModelUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn)); - } + static MLModelUrn getMLModelUrn(String modelUrn) { + try { + return MLModelUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlmodel with urn %s, invalid urn", modelUrn)); } - - static Urn getMLModelGroupUrn(String modelUrn) { - try { - return 
Urn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getMLModelGroupUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlModelGroup with urn %s, invalid urn", modelUrn)); } - - static MLFeatureUrn getMLFeatureUrn(String modelUrn) { - try { - return MLFeatureUrn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); - } + } + + static MLFeatureUrn getMLFeatureUrn(String modelUrn) { + try { + return MLFeatureUrn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve mlFeature with urn %s, invalid urn", modelUrn)); } - - static Urn getUrn(String modelUrn) { - try { - return Urn.createFromString(modelUrn); - } catch (URISyntaxException uriSyntaxException) { - throw new RuntimeException(String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); - } + } + + static Urn getUrn(String modelUrn) { + try { + return Urn.createFromString(modelUrn); + } catch (URISyntaxException uriSyntaxException) { + throw new RuntimeException( + String.format("Failed to retrieve with urn %s, invalid urn", modelUrn)); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java index a6963e6b20abdb..10cfe181dd292f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/MLPrimaryKeyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -18,8 +20,8 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; import java.util.HashSet; @@ -31,78 +33,95 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - public class MLPrimaryKeyType implements SearchableEntityType { - private static final Set FACET_FIELDS = ImmutableSet.of(""); - private final EntityClient _entityClient; + private static final Set FACET_FIELDS = ImmutableSet.of(""); + private final EntityClient _entityClient; - public MLPrimaryKeyType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public MLPrimaryKeyType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.MLPRIMARY_KEY; - } + @Override + public EntityType type() { + return EntityType.MLPRIMARY_KEY; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return MLPrimaryKey.class; - } + @Override + public Class objectClass() { + return MLPrimaryKey.class; + } - @Override - public List> batchLoad(final List urns, @Nonnull final QueryContext context) - throws Exception { - final List 
mlPrimaryKeyUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + final List urns, @Nonnull final QueryContext context) throws Exception { + final List mlPrimaryKeyUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map mlPrimaryKeyMap = _entityClient.batchGetV2(ML_PRIMARY_KEY_ENTITY_NAME, - new HashSet<>(mlPrimaryKeyUrns), null, context.getAuthentication()); + try { + final Map mlPrimaryKeyMap = + _entityClient.batchGetV2( + ML_PRIMARY_KEY_ENTITY_NAME, + new HashSet<>(mlPrimaryKeyUrns), + null, + context.getAuthentication()); - final List gmsResults = mlPrimaryKeyUrns.stream() - .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) - .collect(Collectors.toList()); + final List gmsResults = + mlPrimaryKeyUrns.stream() + .map(primaryKeyUrn -> mlPrimaryKeyMap.getOrDefault(primaryKeyUrn, null)) + .collect(Collectors.toList()); - return gmsResults.stream() - .map(gmsMlPrimaryKey -> gmsMlPrimaryKey == null ? null - : DataFetcherResult.newResult() - .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); - } + return gmsResults.stream() + .map( + gmsMlPrimaryKey -> + gmsMlPrimaryKey == null + ? 
null + : DataFetcherResult.newResult() + .data(MLPrimaryKeyMapper.map(gmsMlPrimaryKey)) + .build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load MLPrimaryKeys", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("mlPrimaryKey", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "mlPrimaryKey", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("mlPrimaryKey", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + "mlPrimaryKey", query, filters, limit, context.getAuthentication()); + return 
AutoCompleteResultsMapper.map(result); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java index c82909d49acbf5..7db1216e1390d2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/BaseDataMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.BaseData; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class BaseDataMapper implements ModelMapper { - public static final BaseDataMapper INSTANCE = new BaseDataMapper(); + public static final BaseDataMapper INSTANCE = new BaseDataMapper(); - public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { - return INSTANCE.apply(input); - } + public static BaseData map(@NonNull final com.linkedin.ml.metadata.BaseData input) { + return INSTANCE.apply(input); + } - @Override - public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { - final BaseData result = new BaseData(); - result.setDataset(input.getDataset().toString()); - result.setMotivation(input.getMotivation()); - result.setPreProcessing(input.getPreProcessing()); - return result; - } + @Override + public BaseData apply(@NonNull final com.linkedin.ml.metadata.BaseData input) { + final BaseData result = new BaseData(); + result.setDataset(input.getDataset().toString()); + result.setMotivation(input.getMotivation()); + result.setPreProcessing(input.getPreProcessing()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java index c19cb7bae2aff6..108717f325f681 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsAndRecommendationsMapper.java @@ -2,29 +2,34 @@ import com.linkedin.datahub.graphql.generated.CaveatsAndRecommendations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class CaveatsAndRecommendationsMapper implements ModelMapper { +public class CaveatsAndRecommendationsMapper + implements ModelMapper< + com.linkedin.ml.metadata.CaveatsAndRecommendations, CaveatsAndRecommendations> { - public static final CaveatsAndRecommendationsMapper INSTANCE = new CaveatsAndRecommendationsMapper(); + public static final CaveatsAndRecommendationsMapper INSTANCE = + new CaveatsAndRecommendationsMapper(); - public static CaveatsAndRecommendations map(@NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - return INSTANCE.apply(caveatsAndRecommendations); - } + public static CaveatsAndRecommendations map( + @NonNull com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { + return INSTANCE.apply(caveatsAndRecommendations); + } - @Override - public CaveatsAndRecommendations apply(com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { - final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); - if (caveatsAndRecommendations.getCaveats() != null) { - result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); - } - if (caveatsAndRecommendations.getRecommendations() != null) { - result.setRecommendations(caveatsAndRecommendations.getRecommendations()); - } - if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != 
null) { - result.setIdealDatasetCharacteristics(caveatsAndRecommendations.getIdealDatasetCharacteristics()); - } - return result; + @Override + public CaveatsAndRecommendations apply( + com.linkedin.ml.metadata.CaveatsAndRecommendations caveatsAndRecommendations) { + final CaveatsAndRecommendations result = new CaveatsAndRecommendations(); + if (caveatsAndRecommendations.getCaveats() != null) { + result.setCaveats(CaveatsDetailsMapper.map(caveatsAndRecommendations.getCaveats())); + } + if (caveatsAndRecommendations.getRecommendations() != null) { + result.setRecommendations(caveatsAndRecommendations.getRecommendations()); + } + if (caveatsAndRecommendations.getIdealDatasetCharacteristics() != null) { + result.setIdealDatasetCharacteristics( + caveatsAndRecommendations.getIdealDatasetCharacteristics()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java index 22617a8bc03e7f..2226197e673f50 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/CaveatsDetailsMapper.java @@ -2,24 +2,24 @@ import com.linkedin.datahub.graphql.generated.CaveatDetails; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class CaveatsDetailsMapper implements ModelMapper { +public class CaveatsDetailsMapper + implements ModelMapper { - public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); + public static final CaveatsDetailsMapper INSTANCE = new CaveatsDetailsMapper(); - public static CaveatDetails map(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - return INSTANCE.apply(input); - } + public static CaveatDetails map(@NonNull final 
com.linkedin.ml.metadata.CaveatDetails input) { + return INSTANCE.apply(input); + } - @Override - public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { - final CaveatDetails result = new CaveatDetails(); + @Override + public CaveatDetails apply(@NonNull final com.linkedin.ml.metadata.CaveatDetails input) { + final CaveatDetails result = new CaveatDetails(); - result.setCaveatDescription(input.getCaveatDescription()); - result.setGroupsNotRepresented(input.getGroupsNotRepresented()); - result.setNeedsFurtherTesting(input.isNeedsFurtherTesting()); - return result; - } + result.setCaveatDescription(input.getCaveatDescription()); + result.setGroupsNotRepresented(input.getGroupsNotRepresented()); + result.setNeedsFurtherTesting(input.isNeedsFurtherTesting()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java index 1d967619d43cbc..8959e59265e14e 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/EthicalConsiderationsMapper.java @@ -2,25 +2,27 @@ import com.linkedin.datahub.graphql.generated.EthicalConsiderations; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class EthicalConsiderationsMapper implements ModelMapper { +public class EthicalConsiderationsMapper + implements ModelMapper { - public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); + public static final EthicalConsiderationsMapper INSTANCE = new EthicalConsiderationsMapper(); - public static EthicalConsiderations map(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations 
ethicalConsiderations) { - return INSTANCE.apply(ethicalConsiderations); - } + public static EthicalConsiderations map( + @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { + return INSTANCE.apply(ethicalConsiderations); + } - @Override - public EthicalConsiderations apply(@NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { - final EthicalConsiderations result = new EthicalConsiderations(); - result.setData(ethicalConsiderations.getData()); - result.setHumanLife(ethicalConsiderations.getHumanLife()); - result.setMitigations(ethicalConsiderations.getMitigations()); - result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms()); - result.setUseCases(ethicalConsiderations.getUseCases()); - return result; - } + @Override + public EthicalConsiderations apply( + @NonNull final com.linkedin.ml.metadata.EthicalConsiderations ethicalConsiderations) { + final EthicalConsiderations result = new EthicalConsiderations(); + result.setData(ethicalConsiderations.getData()); + result.setHumanLife(ethicalConsiderations.getHumanLife()); + result.setMitigations(ethicalConsiderations.getMitigations()); + result.setRisksAndHarms(ethicalConsiderations.getRisksAndHarms()); + result.setUseCases(ethicalConsiderations.getUseCases()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java index 73aa8db362a545..212db94081371a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterMapMapper.java @@ -3,26 +3,26 @@ import com.linkedin.datahub.graphql.generated.HyperParameterMap; import 
com.linkedin.datahub.graphql.types.mappers.ModelMapper; import com.linkedin.ml.metadata.HyperParameterValueTypeMap; - import lombok.NonNull; -public class HyperParameterMapMapper implements ModelMapper { - - public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); +public class HyperParameterMapMapper + implements ModelMapper { - public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { - return INSTANCE.apply(input); - } + public static final HyperParameterMapMapper INSTANCE = new HyperParameterMapMapper(); - @Override - public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { - final HyperParameterMap result = new HyperParameterMap(); + public static HyperParameterMap map(@NonNull final HyperParameterValueTypeMap input) { + return INSTANCE.apply(input); + } - for (String key: input.keySet()) { - result.setKey(key); - result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); - } + @Override + public HyperParameterMap apply(@NonNull final HyperParameterValueTypeMap input) { + final HyperParameterMap result = new HyperParameterMap(); - return result; + for (String key : input.keySet()) { + result.setKey(key); + result.setValue(HyperParameterValueTypeMapper.map(input.get(key))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java index 6509b0e6cfa84f..f60f34dd7a085f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/HyperParameterValueTypeMapper.java @@ -6,34 +6,37 @@ import com.linkedin.datahub.graphql.generated.IntBox; import com.linkedin.datahub.graphql.generated.StringBox; 
import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class HyperParameterValueTypeMapper implements ModelMapper { +public class HyperParameterValueTypeMapper + implements ModelMapper< + com.linkedin.ml.metadata.HyperParameterValueType, HyperParameterValueType> { - public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); + public static final HyperParameterValueTypeMapper INSTANCE = new HyperParameterValueTypeMapper(); - public static HyperParameterValueType map(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - return INSTANCE.apply(input); - } + public static HyperParameterValueType map( + @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { + return INSTANCE.apply(input); + } - @Override - public HyperParameterValueType apply(@NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { - HyperParameterValueType result = null; + @Override + public HyperParameterValueType apply( + @NonNull final com.linkedin.ml.metadata.HyperParameterValueType input) { + HyperParameterValueType result = null; - if (input.isString()) { - result = new StringBox(input.getString()); - } else if (input.isBoolean()) { - result = new BooleanBox(input.getBoolean()); - } else if (input.isInt()) { - result = new IntBox(input.getInt()); - } else if (input.isDouble()) { - result = new FloatBox(input.getDouble()); - } else if (input.isFloat()) { - result = new FloatBox(new Double(input.getFloat())); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); - } - return result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else if (input.isBoolean()) { + result = new BooleanBox(input.getBoolean()); + } else if (input.isInt()) { + result = new IntBox(input.getInt()); + } else if (input.isDouble()) { + result = new FloatBox(input.getDouble()); + } else if (input.isFloat()) { + result 
= new FloatBox(new Double(input.getFloat())); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type: " + input.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java index 47598bc2a3e4c3..9f724ae71a55e9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/IntendedUseMapper.java @@ -1,29 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.IntendedUse; import com.linkedin.datahub.graphql.generated.IntendedUserType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; -public class IntendedUseMapper implements ModelMapper { +public class IntendedUseMapper + implements ModelMapper { - public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); + public static final IntendedUseMapper INSTANCE = new IntendedUseMapper(); - public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - return INSTANCE.apply(intendedUse); - } + public static IntendedUse map(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + return INSTANCE.apply(intendedUse); + } - @Override - public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { - final IntendedUse result = new IntendedUse(); - result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); - result.setPrimaryUses(intendedUse.getPrimaryUses()); - if (intendedUse.getPrimaryUsers() != null) { - result.setPrimaryUsers(intendedUse.getPrimaryUsers().stream().map(v -> 
IntendedUserType.valueOf(v.toString())).collect(Collectors.toList())); - } - return result; + @Override + public IntendedUse apply(@NonNull final com.linkedin.ml.metadata.IntendedUse intendedUse) { + final IntendedUse result = new IntendedUse(); + result.setOutOfScopeUses(intendedUse.getOutOfScopeUses()); + result.setPrimaryUses(intendedUse.getPrimaryUses()); + if (intendedUse.getPrimaryUsers() != null) { + result.setPrimaryUsers( + intendedUse.getPrimaryUsers().stream() + .map(v -> IntendedUserType.valueOf(v.toString())) + .collect(Collectors.toList())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java index 010ae477251f39..58e59edfa2e389 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureMapper.java @@ -1,10 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - - import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -37,91 +37,105 @@ import com.linkedin.ml.metadata.MLFeatureProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. 
*/ public class MLFeatureMapper implements ModelMapper { - public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); + public static final MLFeatureMapper INSTANCE = new MLFeatureMapper(); - public static MLFeature map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLFeature map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLFeature apply(@Nonnull final EntityResponse entityResponse) { - final MLFeature result = new MLFeature(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLFeature apply(@Nonnull final EntityResponse entityResponse) { + final MLFeature result = new MLFeature(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeature, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_FEATURE_KEY_ASPECT_NAME, this::mapMLFeatureKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); - 
mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeature, dataMap) -> - mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeature, dataMap) -> + mappingHelper.mapToResult(ML_FEATURE_PROPERTIES_ASPECT_NAME, this::mapMLFeatureProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeature, dataMap) -> + mlFeature.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeature, dataMap) -> mlFeature.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + 
GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); + return mappingHelper.getResult(); + } + + private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); + mlFeature.setName(mlFeatureKey.getName()); + mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); + } + + private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { + MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); + mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); + mlFeature.setDescription(featureProperties.getDescription()); + if (featureProperties.getDataType() != null) { + mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); } - - private void mapMLFeatureKey(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureKey mlFeatureKey = new MLFeatureKey(dataMap); - mlFeature.setName(mlFeatureKey.getName()); - mlFeature.setFeatureNamespace(mlFeatureKey.getFeatureNamespace()); - } - - private void mapMLFeatureProperties(@Nonnull MLFeature mlFeature, @Nonnull DataMap dataMap) { - MLFeatureProperties featureProperties = new MLFeatureProperties(dataMap); - 
mlFeature.setFeatureProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setProperties(MLFeaturePropertiesMapper.map(featureProperties)); - mlFeature.setDescription(featureProperties.getDescription()); - if (featureProperties.getDataType() != null) { - mlFeature.setDataType(MLFeatureDataType.valueOf(featureProperties.getDataType().toString())); - } - } - - private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeature entity, DataMap dataMap) { - EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); - MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLFeature entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLFeature entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeature entity, DataMap dataMap) { + EditableMLFeatureProperties input = new EditableMLFeatureProperties(dataMap); + MLFeatureEditableProperties editableProperties = new MLFeatureEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java index 9d647a38d2153a..7bcefbc305192b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeaturePropertiesMapper.java @@ -1,44 +1,46 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLFeatureProperties; -import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; -public class MLFeaturePropertiesMapper implements ModelMapper { +public class MLFeaturePropertiesMapper + implements ModelMapper { - public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); + public static final MLFeaturePropertiesMapper INSTANCE = new MLFeaturePropertiesMapper(); - public static MLFeatureProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - return INSTANCE.apply(mlFeatureProperties); - } + public static 
MLFeatureProperties map( + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + return INSTANCE.apply(mlFeatureProperties); + } - @Override - public MLFeatureProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { - final MLFeatureProperties result = new MLFeatureProperties(); + @Override + public MLFeatureProperties apply( + @NonNull final com.linkedin.ml.metadata.MLFeatureProperties mlFeatureProperties) { + final MLFeatureProperties result = new MLFeatureProperties(); - result.setDescription(mlFeatureProperties.getDescription()); - if (mlFeatureProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); - } - if (mlFeatureProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); - } - if (mlFeatureProperties.getSources() != null) { - result.setSources(mlFeatureProperties - .getSources() - .stream() - .map(urn -> { + result.setDescription(mlFeatureProperties.getDescription()); + if (mlFeatureProperties.getDataType() != null) { + result.setDataType(MLFeatureDataType.valueOf(mlFeatureProperties.getDataType().toString())); + } + if (mlFeatureProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlFeatureProperties.getVersion())); + } + if (mlFeatureProperties.getSources() != null) { + result.setSources( + mlFeatureProperties.getSources().stream() + .map( + urn -> { final Dataset dataset = new Dataset(); dataset.setUrn(urn.toString()); return dataset; - }) - .collect(Collectors.toList())); - } - - return result; + }) + .collect(Collectors.toList())); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java index 3ba9a76c4bdde1..d074e14f95c82c 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTableMapper.java @@ -1,9 +1,10 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; - import com.linkedin.common.GlobalTags; import com.linkedin.common.GlossaryTerms; import com.linkedin.common.InstitutionalMemory; @@ -36,90 +37,111 @@ import com.linkedin.ml.metadata.MLFeatureTableProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLFeatureTableMapper implements ModelMapper { - public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); + public static final MLFeatureTableMapper INSTANCE = new MLFeatureTableMapper(); - public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLFeatureTable map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { - final MLFeatureTable result = new MLFeatureTable(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLFeatureTable apply(@Nonnull final EntityResponse entityResponse) { + final MLFeatureTable result = new MLFeatureTable(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLFEATURE_TABLE); - EnvelopedAspectMap aspectMap = 
entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLFEATURE_TABLE); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlFeatureTable, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); - mappingHelper.mapToResult(ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlFeatureTable, dataMap) -> - mlFeatureTable.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlFeatureTable, dataMap) -> + mappingHelper.mapToResult(ML_FEATURE_TABLE_KEY_ASPECT_NAME, this::mapMLFeatureTableKey); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLFeatureTableProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlFeatureTable, dataMap) -> + mlFeatureTable.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setStatus(StatusMapper.map(new Status(dataMap)))); - 
mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlFeatureTable, dataMap) -> + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlFeatureTable, dataMap) -> mlFeatureTable.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (entity, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_FEATURE_TABLE_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (entity, dataMap) -> entity.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLFeatureTableKey(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { - 
MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); - mlFeatureTable.setName(mlFeatureTableKey.getName()); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); - mlFeatureTable.setPlatform(partialPlatform); - } - - private void mapMLFeatureTableProperties(@Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { - MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); - mlFeatureTable.setFeatureTableProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setProperties(MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); - mlFeatureTable.setDescription(featureTableProperties.getDescription()); - } - - private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { - EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); - MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + return mappingHelper.getResult(); + } + + private void mapMLFeatureTableKey( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap) { + MLFeatureTableKey mlFeatureTableKey = new MLFeatureTableKey(dataMap); + mlFeatureTable.setName(mlFeatureTableKey.getName()); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlFeatureTableKey.getPlatform().toString()); + mlFeatureTable.setPlatform(partialPlatform); + } + + private void mapMLFeatureTableProperties( + @Nonnull MLFeatureTable mlFeatureTable, @Nonnull DataMap dataMap, Urn entityUrn) { + MLFeatureTableProperties featureTableProperties = new MLFeatureTableProperties(dataMap); + mlFeatureTable.setFeatureTableProperties( + MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + mlFeatureTable.setProperties( + MLFeatureTablePropertiesMapper.map(featureTableProperties, entityUrn)); + mlFeatureTable.setDescription(featureTableProperties.getDescription()); + } + + private void mapGlobalTags(MLFeatureTable entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLFeatureTable entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLFeatureTable entity, DataMap dataMap) { + EditableMLFeatureTableProperties input = new EditableMLFeatureTableProperties(dataMap); + MLFeatureTableEditableProperties editableProperties = new MLFeatureTableEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java index 13e3c795997250..fff504d43c81a1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLFeatureTablePropertiesMapper.java @@ -5,46 +5,55 @@ import com.linkedin.datahub.graphql.generated.MLFeatureTableProperties; import com.linkedin.datahub.graphql.generated.MLPrimaryKey; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import lombok.NonNull; public class MLFeatureTablePropertiesMapper { - public static final MLFeatureTablePropertiesMapper INSTANCE = new MLFeatureTablePropertiesMapper(); - - public static MLFeatureTableProperties map(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + public static final MLFeatureTablePropertiesMapper INSTANCE = + new MLFeatureTablePropertiesMapper(); + + public static MLFeatureTableProperties map( + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn 
entityUrn) { + return INSTANCE.apply(mlFeatureTableProperties, entityUrn); + } + + public MLFeatureTableProperties apply( + @NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, + Urn entityUrn) { + final MLFeatureTableProperties result = new MLFeatureTableProperties(); + + result.setDescription(mlFeatureTableProperties.getDescription()); + if (mlFeatureTableProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlFeatureTableProperties.getMlFeatures().stream() + .map( + urn -> { + final MLFeature mlFeature = new MLFeature(); + mlFeature.setUrn(urn.toString()); + return mlFeature; + }) + .collect(Collectors.toList())); } - public MLFeatureTableProperties apply(@NonNull final com.linkedin.ml.metadata.MLFeatureTableProperties mlFeatureTableProperties, Urn entityUrn) { - final MLFeatureTableProperties result = new MLFeatureTableProperties(); - - result.setDescription(mlFeatureTableProperties.getDescription()); - if (mlFeatureTableProperties.getMlFeatures() != null) { - result.setMlFeatures(mlFeatureTableProperties.getMlFeatures().stream().map(urn -> { - final MLFeature mlFeature = new MLFeature(); - mlFeature.setUrn(urn.toString()); - return mlFeature; - }).collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { - result.setMlPrimaryKeys(mlFeatureTableProperties - .getMlPrimaryKeys() - .stream() - .map(urn -> { + if (mlFeatureTableProperties.getMlPrimaryKeys() != null) { + result.setMlPrimaryKeys( + mlFeatureTableProperties.getMlPrimaryKeys().stream() + .map( + urn -> { final MLPrimaryKey mlPrimaryKey = new MLPrimaryKey(); mlPrimaryKey.setUrn(urn.toString()); return mlPrimaryKey; - }) - .collect(Collectors.toList())); - } - - if (mlFeatureTableProperties.hasCustomProperties()) { - result.setCustomProperties(CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); - } + }) + .collect(Collectors.toList())); + } - return result; + if 
(mlFeatureTableProperties.hasCustomProperties()) { + result.setCustomProperties( + CustomPropertiesMapper.map(mlFeatureTableProperties.getCustomProperties(), entityUrn)); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java index 5cc242d0b19f2f..bb3c85e411e715 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLHyperParamMapper.java @@ -2,25 +2,25 @@ import com.linkedin.datahub.graphql.generated.MLHyperParam; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class MLHyperParamMapper implements ModelMapper { +public class MLHyperParamMapper + implements ModelMapper { - public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); + public static final MLHyperParamMapper INSTANCE = new MLHyperParamMapper(); - public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - return INSTANCE.apply(input); - } + public static MLHyperParam map(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + return INSTANCE.apply(input); + } - @Override - public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { - final MLHyperParam result = new MLHyperParam(); + @Override + public MLHyperParam apply(@NonNull final com.linkedin.ml.metadata.MLHyperParam input) { + final MLHyperParam result = new MLHyperParam(); - result.setDescription(input.getDescription()); - result.setValue(input.getValue()); - result.setCreatedAt(input.getCreatedAt()); - result.setName(input.getName()); - return result; - } + result.setDescription(input.getDescription()); + result.setValue(input.getValue()); + 
result.setCreatedAt(input.getCreatedAt()); + result.setName(input.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java index 2545bd5f8a848c..765a44d218567d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLMetricMapper.java @@ -4,22 +4,21 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import lombok.NonNull; - public class MLMetricMapper implements ModelMapper { - public static final MLMetricMapper INSTANCE = new MLMetricMapper(); + public static final MLMetricMapper INSTANCE = new MLMetricMapper(); - public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - return INSTANCE.apply(metric); - } + public static MLMetric map(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + return INSTANCE.apply(metric); + } - @Override - public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { - final MLMetric result = new MLMetric(); - result.setDescription(metric.getDescription()); - result.setValue(metric.getValue()); - result.setCreatedAt(metric.getCreatedAt()); - result.setName(metric.getName()); - return result; - } + @Override + public MLMetric apply(@NonNull final com.linkedin.ml.metadata.MLMetric metric) { + final MLMetric result = new MLMetric(); + result.setDescription(metric.getDescription()); + result.setValue(metric.getValue()); + result.setCreatedAt(metric.getCreatedAt()); + result.setName(metric.getName()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java index 0d32f7275e5fec..e86072ce3848e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorPromptsMapper.java @@ -1,29 +1,36 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.stream.Collectors; - import com.linkedin.datahub.graphql.generated.MLModelFactorPrompts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.stream.Collectors; import lombok.NonNull; -public class MLModelFactorPromptsMapper implements ModelMapper { +public class MLModelFactorPromptsMapper + implements ModelMapper { - public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); + public static final MLModelFactorPromptsMapper INSTANCE = new MLModelFactorPromptsMapper(); - public static MLModelFactorPrompts map(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - return INSTANCE.apply(input); - } + public static MLModelFactorPrompts map( + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + return INSTANCE.apply(input); + } - @Override - public MLModelFactorPrompts apply(@NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { - final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); - if (input.getEvaluationFactors() != null) { - mlModelFactorPrompts.setEvaluationFactors(input.getEvaluationFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - if (input.getRelevantFactors() != null) { - mlModelFactorPrompts.setRelevantFactors(input.getRelevantFactors().stream().map(MLModelFactorsMapper::map).collect(Collectors.toList())); - } - return mlModelFactorPrompts; + @Override + public MLModelFactorPrompts 
apply( + @NonNull final com.linkedin.ml.metadata.MLModelFactorPrompts input) { + final MLModelFactorPrompts mlModelFactorPrompts = new MLModelFactorPrompts(); + if (input.getEvaluationFactors() != null) { + mlModelFactorPrompts.setEvaluationFactors( + input.getEvaluationFactors().stream() + .map(MLModelFactorsMapper::map) + .collect(Collectors.toList())); + } + if (input.getRelevantFactors() != null) { + mlModelFactorPrompts.setRelevantFactors( + input.getRelevantFactors().stream() + .map(MLModelFactorsMapper::map) + .collect(Collectors.toList())); } + return mlModelFactorPrompts; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java index aa4737dfd229c8..3b212eca52801a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelFactorsMapper.java @@ -1,32 +1,33 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import java.util.ArrayList; - import com.linkedin.datahub.graphql.generated.MLModelFactors; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - +import java.util.ArrayList; import lombok.NonNull; -public class MLModelFactorsMapper implements ModelMapper { +public class MLModelFactorsMapper + implements ModelMapper { - public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); + public static final MLModelFactorsMapper INSTANCE = new MLModelFactorsMapper(); - public static MLModelFactors map(@NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { - return INSTANCE.apply(modelFactors); - } + public static MLModelFactors map( + @NonNull final com.linkedin.ml.metadata.MLModelFactors modelFactors) { + return INSTANCE.apply(modelFactors); + } - @Override 
- public MLModelFactors apply(@NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { - final MLModelFactors result = new MLModelFactors(); - if (mlModelFactors.getEnvironment() != null) { - result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); - } - if (mlModelFactors.getGroups() != null) { - result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); - } - if (mlModelFactors.getInstrumentation() != null) { - result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); - } - return result; + @Override + public MLModelFactors apply( + @NonNull final com.linkedin.ml.metadata.MLModelFactors mlModelFactors) { + final MLModelFactors result = new MLModelFactors(); + if (mlModelFactors.getEnvironment() != null) { + result.setEnvironment(new ArrayList<>(mlModelFactors.getEnvironment())); + } + if (mlModelFactors.getGroups() != null) { + result.setGroups(new ArrayList<>(mlModelFactors.getGroups())); + } + if (mlModelFactors.getInstrumentation() != null) { + result.setInstrumentation(new ArrayList<>(mlModelFactors.getInstrumentation())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java index 311ee121bcaf98..cc9baaa33a660a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; @@ -34,90 +36,102 @@ import com.linkedin.ml.metadata.MLModelGroupProperties; import 
javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelGroupMapper implements ModelMapper { - public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); + public static final MLModelGroupMapper INSTANCE = new MLModelGroupMapper(); - public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModelGroup map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { - final MLModelGroup result = new MLModelGroup(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModelGroup apply(@Nonnull final EntityResponse entityResponse) { + final MLModelGroup result = new MLModelGroup(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL_GROUP); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL_GROUP); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModelGroup, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModelGroup, dataMap) -> 
mlModelGroup.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); - mappingHelper.mapToResult(ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModelGroup, dataMap) -> - mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModelGroup, dataMap) -> + mappingHelper.mapToResult(ML_MODEL_GROUP_KEY_ASPECT_NAME, this::mapToMLModelGroupKey); + mappingHelper.mapToResult( + ML_MODEL_GROUP_PROPERTIES_ASPECT_NAME, this::mapToMLModelGroupProperties); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModelGroup, dataMap) -> + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + 
GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_GROUP_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModelGroup, dataMap) -> mlModelGroup.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); - mlModelGroup.setName(mlModelGroupKey.getName()); - mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); - mlModelGroup.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapToMLModelGroupKey(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupKey mlModelGroupKey = new MLModelGroupKey(dataMap); + mlModelGroup.setName(mlModelGroupKey.getName()); + mlModelGroup.setOrigin(FabricType.valueOf(mlModelGroupKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelGroupKey.getPlatform().toString()); + mlModelGroup.setPlatform(partialPlatform); + } + + private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { + MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); + mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); + if (modelGroupProperties.getDescription() != null) { + mlModelGroup.setDescription(modelGroupProperties.getDescription()); } - 
- private void mapToMLModelGroupProperties(MLModelGroup mlModelGroup, DataMap dataMap) { - MLModelGroupProperties modelGroupProperties = new MLModelGroupProperties(dataMap); - mlModelGroup.setProperties(MLModelGroupPropertiesMapper.map(modelGroupProperties)); - if (modelGroupProperties.getDescription() != null) { - mlModelGroup.setDescription(modelGroupProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } - - private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { - EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); - MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModelGroup entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } + + private void mapDomains(@Nonnull MLModelGroup entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. 
+ entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModelGroup entity, DataMap dataMap) { + EditableMLModelGroupProperties input = new EditableMLModelGroupProperties(dataMap); + MLModelGroupEditableProperties editableProperties = new MLModelGroupEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java index 9a12d7917e6489..bae60a026b49a3 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelGroupPropertiesMapper.java @@ -1,28 +1,31 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - import com.linkedin.datahub.graphql.generated.MLModelGroupProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import lombok.NonNull; -public class MLModelGroupPropertiesMapper implements ModelMapper { - - public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); +public class MLModelGroupPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLModelGroupProperties, MLModelGroupProperties> { - public static MLModelGroupProperties map(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { - return INSTANCE.apply(mlModelGroupProperties); - } + public static final MLModelGroupPropertiesMapper INSTANCE = new MLModelGroupPropertiesMapper(); - @Override - public MLModelGroupProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelGroupProperties 
mlModelGroupProperties) { - final MLModelGroupProperties result = new MLModelGroupProperties(); + public static MLModelGroupProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + return INSTANCE.apply(mlModelGroupProperties); + } - result.setDescription(mlModelGroupProperties.getDescription()); - if (mlModelGroupProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); - } - result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + @Override + public MLModelGroupProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelGroupProperties mlModelGroupProperties) { + final MLModelGroupProperties result = new MLModelGroupProperties(); - return result; + result.setDescription(mlModelGroupProperties.getDescription()); + if (mlModelGroupProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlModelGroupProperties.getVersion())); } + result.setCreatedAt(mlModelGroupProperties.getCreatedAt()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java index 0c2eeabe5701d9..827b35c282237a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.Cost; import com.linkedin.common.DataPlatformInstance; @@ -48,124 +50,165 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} 
objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLModelMapper implements ModelMapper { - public static final MLModelMapper INSTANCE = new MLModelMapper(); + public static final MLModelMapper INSTANCE = new MLModelMapper(); - public static MLModel map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLModel map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLModel apply(@Nonnull final EntityResponse entityResponse) { - final MLModel result = new MLModel(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLModel apply(@Nonnull final EntityResponse entityResponse) { + final MLModel result = new MLModel(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLMODEL); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLMODEL); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlModel, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(ML_MODEL_KEY_ASPECT_NAME, this::mapMLModelKey); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - 
mappingHelper.mapToResult(ML_MODEL_PROPERTIES_ASPECT_NAME, (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); - mappingHelper.mapToResult(INTENDED_USE_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_PROPERTIES_ASPECT_NAME, + (entity, dataMap) -> this.mapMLModelProperties(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (mlModel, dataMap) -> this.mapGlobalTags(mlModel, dataMap, entityUrn)); + mappingHelper.mapToResult( + INTENDED_USE_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setIntendedUse(IntendedUseMapper.map(new IntendedUse(dataMap)))); - mappingHelper.mapToResult(ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setFactorPrompts(MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); - mappingHelper.mapToResult(METRICS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); - mappingHelper.mapToResult(EVALUATION_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEvaluationData(new EvaluationData(dataMap).getEvaluationData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(TRAINING_DATA_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setTrainingData(new TrainingData(dataMap).getTrainingData() - .stream().map(BaseDataMapper::map) - .collect(Collectors.toList()))); - mappingHelper.mapToResult(QUANTITATIVE_ANALYSES_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setQuantitativeAnalyses(QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); - mappingHelper.mapToResult(ETHICAL_CONSIDERATIONS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setEthicalConsiderations(EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); - mappingHelper.mapToResult(CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, (mlModel, dataMap) -> 
- mlModel.setCaveatsAndRecommendations(CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(COST_ASPECT_NAME, (mlModel, dataMap) -> - mlModel.setCost(CostMapper.map(new Cost(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + ML_MODEL_FACTOR_PROMPTS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setFactorPrompts( + MLModelFactorPromptsMapper.map(new MLModelFactorPrompts(dataMap)))); + mappingHelper.mapToResult( + METRICS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setMetrics(MetricsMapper.map(new Metrics(dataMap)))); + mappingHelper.mapToResult( + EVALUATION_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEvaluationData( + new EvaluationData(dataMap) + .getEvaluationData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + TRAINING_DATA_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setTrainingData( + new TrainingData(dataMap) + .getTrainingData().stream() + .map(BaseDataMapper::map) + .collect(Collectors.toList()))); + mappingHelper.mapToResult( + QUANTITATIVE_ANALYSES_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setQuantitativeAnalyses( + QuantitativeAnalysesMapper.map(new QuantitativeAnalyses(dataMap)))); + mappingHelper.mapToResult( + ETHICAL_CONSIDERATIONS_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setEthicalConsiderations( + EthicalConsiderationsMapper.map(new EthicalConsiderations(dataMap)))); + mappingHelper.mapToResult( + CAVEATS_AND_RECOMMENDATIONS_ASPECT_NAME, + (mlModel, 
dataMap) -> + mlModel.setCaveatsAndRecommendations( + CaveatsAndRecommendationsMapper.map(new CaveatsAndRecommendations(dataMap)))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlModel, dataMap) -> + mlModel.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult(SOURCE_CODE_ASPECT_NAME, this::mapSourceCode); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + COST_ASPECT_NAME, (mlModel, dataMap) -> mlModel.setCost(CostMapper.map(new Cost(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (mlModel, dataMap) -> + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_MODEL_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + 
mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (mlModel, dataMap) -> mlModel.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); - return mappingHelper.getResult(); - } - - private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { - MLModelKey mlModelKey = new MLModelKey(dataMap); - mlModel.setName(mlModelKey.getName()); - mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); - DataPlatform partialPlatform = new DataPlatform(); - partialPlatform.setUrn(mlModelKey.getPlatform().toString()); - mlModel.setPlatform(partialPlatform); + return mappingHelper.getResult(); + } + + private void mapMLModelKey(MLModel mlModel, DataMap dataMap) { + MLModelKey mlModelKey = new MLModelKey(dataMap); + mlModel.setName(mlModelKey.getName()); + mlModel.setOrigin(FabricType.valueOf(mlModelKey.getOrigin().toString())); + DataPlatform partialPlatform = new DataPlatform(); + partialPlatform.setUrn(mlModelKey.getPlatform().toString()); + mlModel.setPlatform(partialPlatform); + } + + private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + MLModelProperties modelProperties = new MLModelProperties(dataMap); + mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); + if (modelProperties.getDescription() != null) { + mlModel.setDescription(modelProperties.getDescription()); } - - private void mapMLModelProperties(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - MLModelProperties modelProperties = new MLModelProperties(dataMap); - mlModel.setProperties(MLModelPropertiesMapper.map(modelProperties, entityUrn)); - if (modelProperties.getDescription() != null) { - mlModel.setDescription(modelProperties.getDescription()); - } - } - - private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - 
mlModel.setGlobalTags(graphQlGlobalTags); - mlModel.setTags(graphQlGlobalTags); - } - - private void mapSourceCode(MLModel mlModel, DataMap dataMap) { - SourceCode sourceCode = new SourceCode(dataMap); - com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = - new com.linkedin.datahub.graphql.generated.SourceCode(); - graphQlSourceCode.setSourceCode(sourceCode.getSourceCode().stream() - .map(SourceCodeUrlMapper::map).collect(Collectors.toList())); - mlModel.setSourceCode(graphQlSourceCode); - } - - private void mapDomains(@Nonnull MLModel entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. - entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } - - private void mapEditableProperties(MLModel entity, DataMap dataMap) { - EditableMLModelProperties input = new EditableMLModelProperties(dataMap); - MLModelEditableProperties editableProperties = new MLModelEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + } + + private void mapGlobalTags(MLModel mlModel, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + mlModel.setGlobalTags(graphQlGlobalTags); + mlModel.setTags(graphQlGlobalTags); + } + + private void mapSourceCode(MLModel mlModel, DataMap dataMap) { + SourceCode sourceCode = new SourceCode(dataMap); + com.linkedin.datahub.graphql.generated.SourceCode graphQlSourceCode = + new com.linkedin.datahub.graphql.generated.SourceCode(); + graphQlSourceCode.setSourceCode( + sourceCode.getSourceCode().stream() + .map(SourceCodeUrlMapper::map) + .collect(Collectors.toList())); + mlModel.setSourceCode(graphQlSourceCode); + } + + private void mapDomains(@Nonnull MLModel entity, 
@Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } + + private void mapEditableProperties(MLModel entity, DataMap dataMap) { + EditableMLModelProperties input = new EditableMLModelProperties(dataMap); + MLModelEditableProperties editableProperties = new MLModelEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java index 554c14e9a4a56d..f2781f5bca5c88 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLModelPropertiesMapper.java @@ -1,65 +1,71 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; - +import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.MLModelGroup; +import com.linkedin.datahub.graphql.generated.MLModelProperties; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import java.util.stream.Collectors; - -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.MLModelProperties; - import lombok.NonNull; public class MLModelPropertiesMapper { - public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); + public static final MLModelPropertiesMapper INSTANCE = new MLModelPropertiesMapper(); - public static MLModelProperties map(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - return 
INSTANCE.apply(mlModelProperties, entityUrn); - } + public static MLModelProperties map( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + return INSTANCE.apply(mlModelProperties, entityUrn); + } - public MLModelProperties apply(@NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { - final MLModelProperties result = new MLModelProperties(); + public MLModelProperties apply( + @NonNull final com.linkedin.ml.metadata.MLModelProperties mlModelProperties, Urn entityUrn) { + final MLModelProperties result = new MLModelProperties(); - result.setDate(mlModelProperties.getDate()); - result.setDescription(mlModelProperties.getDescription()); - if (mlModelProperties.getExternalUrl() != null) { - result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); - } - if (mlModelProperties.getVersion() != null) { - result.setVersion(mlModelProperties.getVersion().getVersionTag()); - } - result.setType(mlModelProperties.getType()); - if (mlModelProperties.getHyperParams() != null) { - result.setHyperParams(mlModelProperties.getHyperParams().stream().map( - param -> MLHyperParamMapper.map(param)).collect(Collectors.toList())); - } - - result.setCustomProperties(CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); + result.setDate(mlModelProperties.getDate()); + result.setDescription(mlModelProperties.getDescription()); + if (mlModelProperties.getExternalUrl() != null) { + result.setExternalUrl(mlModelProperties.getExternalUrl().toString()); + } + if (mlModelProperties.getVersion() != null) { + result.setVersion(mlModelProperties.getVersion().getVersionTag()); + } + result.setType(mlModelProperties.getType()); + if (mlModelProperties.getHyperParams() != null) { + result.setHyperParams( + mlModelProperties.getHyperParams().stream() + .map(param -> MLHyperParamMapper.map(param)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getTrainingMetrics() != 
null) { - result.setTrainingMetrics(mlModelProperties.getTrainingMetrics().stream().map(metric -> - MLMetricMapper.map(metric) - ).collect(Collectors.toList())); - } + result.setCustomProperties( + CustomPropertiesMapper.map(mlModelProperties.getCustomProperties(), entityUrn)); - if (mlModelProperties.getGroups() != null) { - result.setGroups(mlModelProperties.getGroups().stream().map(group -> { - final MLModelGroup subgroup = new MLModelGroup(); - subgroup.setUrn(group.toString()); - return subgroup; - }).collect(Collectors.toList())); - } + if (mlModelProperties.getTrainingMetrics() != null) { + result.setTrainingMetrics( + mlModelProperties.getTrainingMetrics().stream() + .map(metric -> MLMetricMapper.map(metric)) + .collect(Collectors.toList())); + } - if (mlModelProperties.getMlFeatures() != null) { - result.setMlFeatures(mlModelProperties - .getMlFeatures() - .stream() - .map(Urn::toString) - .collect(Collectors.toList())); - } - result.setTags(mlModelProperties.getTags()); + if (mlModelProperties.getGroups() != null) { + result.setGroups( + mlModelProperties.getGroups().stream() + .map( + group -> { + final MLModelGroup subgroup = new MLModelGroup(); + subgroup.setUrn(group.toString()); + return subgroup; + }) + .collect(Collectors.toList())); + } - return result; + if (mlModelProperties.getMlFeatures() != null) { + result.setMlFeatures( + mlModelProperties.getMlFeatures().stream() + .map(Urn::toString) + .collect(Collectors.toList())); } + result.setTags(mlModelProperties.getTags()); + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java index 0bd5db4d884ae4..a8efd748401f02 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.Deprecation; import com.linkedin.common.GlobalTags; @@ -33,88 +35,102 @@ import com.linkedin.ml.metadata.MLPrimaryKeyProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -/** - * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. - * - */ +/** Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. */ public class MLPrimaryKeyMapper implements ModelMapper { - public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); + public static final MLPrimaryKeyMapper INSTANCE = new MLPrimaryKeyMapper(); - public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } + public static MLPrimaryKey map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - @Override - public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { - final MLPrimaryKey result = new MLPrimaryKey(); - Urn entityUrn = entityResponse.getUrn(); + @Override + public MLPrimaryKey apply(@Nonnull final EntityResponse entityResponse) { + final MLPrimaryKey result = new MLPrimaryKey(); + Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.MLPRIMARY_KEY); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - Long lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); - result.setLastIngested(lastIngested); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.MLPRIMARY_KEY); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + Long 
lastIngested = SystemMetadataUtils.getLastIngestedTime(aspectMap); + result.setLastIngested(lastIngested); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); - mappingHelper.mapToResult(ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (mlPrimaryKey, dataMap) -> - mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(DEPRECATION_ASPECT_NAME, (mlPrimaryKey, dataMap) -> + mappingHelper.mapToResult(ML_PRIMARY_KEY_KEY_ASPECT_NAME, this::mapMLPrimaryKeyKey); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_PROPERTIES_ASPECT_NAME, this::mapMLPrimaryKeyProperties); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> + mlPrimaryKey.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + DEPRECATION_ASPECT_NAME, + (mlPrimaryKey, dataMap) -> mlPrimaryKey.setDeprecation(DeprecationMapper.map(new Deprecation(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); - 
mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (entity, dataMap) -> - entity.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); - mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); - mappingHelper.mapToResult(ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); - mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, (dataset, dataMap) -> - dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); - return mappingHelper.getResult(); - } + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (entity, dataMap) -> this.mapGlobalTags(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (entity, dataMap) -> + entity.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); + mappingHelper.mapToResult( + ML_PRIMARY_KEY_EDITABLE_PROPERTIES_ASPECT_NAME, this::mapEditableProperties); + mappingHelper.mapToResult( + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + (dataset, dataMap) -> + dataset.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); + return mappingHelper.getResult(); + } - private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); - mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); - mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); - } + private void mapMLPrimaryKeyKey(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyKey mlPrimaryKeyKey = new MLPrimaryKeyKey(dataMap); + mlPrimaryKey.setName(mlPrimaryKeyKey.getName()); + mlPrimaryKey.setFeatureNamespace(mlPrimaryKeyKey.getFeatureNamespace()); + } - private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { - MLPrimaryKeyProperties primaryKeyProperties = new 
MLPrimaryKeyProperties(dataMap); - mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); - mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); - if (primaryKeyProperties.getDataType() != null) { - mlPrimaryKey.setDataType(MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); - } + private void mapMLPrimaryKeyProperties(MLPrimaryKey mlPrimaryKey, DataMap dataMap) { + MLPrimaryKeyProperties primaryKeyProperties = new MLPrimaryKeyProperties(dataMap); + mlPrimaryKey.setPrimaryKeyProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setProperties(MLPrimaryKeyPropertiesMapper.map(primaryKeyProperties)); + mlPrimaryKey.setDescription(primaryKeyProperties.getDescription()); + if (primaryKeyProperties.getDataType() != null) { + mlPrimaryKey.setDataType( + MLFeatureDataType.valueOf(primaryKeyProperties.getDataType().toString())); } + } - private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { - GlobalTags globalTags = new GlobalTags(dataMap); - com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = GlobalTagsMapper.map(globalTags, entityUrn); - entity.setTags(graphQlGlobalTags); - } + private void mapGlobalTags(MLPrimaryKey entity, DataMap dataMap, Urn entityUrn) { + GlobalTags globalTags = new GlobalTags(dataMap); + com.linkedin.datahub.graphql.generated.GlobalTags graphQlGlobalTags = + GlobalTagsMapper.map(globalTags, entityUrn); + entity.setTags(graphQlGlobalTags); + } - private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { - final Domains domains = new Domains(dataMap); - // Currently we only take the first domain if it exists. 
- entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); - } + private void mapDomains(@Nonnull MLPrimaryKey entity, @Nonnull DataMap dataMap) { + final Domains domains = new Domains(dataMap); + // Currently we only take the first domain if it exists. + entity.setDomain(DomainAssociationMapper.map(domains, entity.getUrn())); + } - private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { - EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); - MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); - if (input.hasDescription()) { - editableProperties.setDescription(input.getDescription()); - } - entity.setEditableProperties(editableProperties); + private void mapEditableProperties(MLPrimaryKey entity, DataMap dataMap) { + EditableMLPrimaryKeyProperties input = new EditableMLPrimaryKeyProperties(dataMap); + MLPrimaryKeyEditableProperties editableProperties = new MLPrimaryKeyEditableProperties(); + if (input.hasDescription()) { + editableProperties.setDescription(input.getDescription()); } + entity.setEditableProperties(editableProperties); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java index 39ecd96af182f3..16d6120cd9dff2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MLPrimaryKeyPropertiesMapper.java @@ -4,39 +4,43 @@ import com.linkedin.datahub.graphql.generated.MLFeatureDataType; import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; -import lombok.NonNull; - import java.util.stream.Collectors; +import 
lombok.NonNull; -public class MLPrimaryKeyPropertiesMapper implements ModelMapper { +public class MLPrimaryKeyPropertiesMapper + implements ModelMapper< + com.linkedin.ml.metadata.MLPrimaryKeyProperties, MLPrimaryKeyProperties> { - public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); + public static final MLPrimaryKeyPropertiesMapper INSTANCE = new MLPrimaryKeyPropertiesMapper(); - public static MLPrimaryKeyProperties map(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - return INSTANCE.apply(mlPrimaryKeyProperties); - } + public static MLPrimaryKeyProperties map( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + return INSTANCE.apply(mlPrimaryKeyProperties); + } - @Override - public MLPrimaryKeyProperties apply(@NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { - final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); + @Override + public MLPrimaryKeyProperties apply( + @NonNull final com.linkedin.ml.metadata.MLPrimaryKeyProperties mlPrimaryKeyProperties) { + final MLPrimaryKeyProperties result = new MLPrimaryKeyProperties(); - result.setDescription(mlPrimaryKeyProperties.getDescription()); - if (mlPrimaryKeyProperties.getDataType() != null) { - result.setDataType(MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); - } - if (mlPrimaryKeyProperties.getVersion() != null) { - result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); - } - result.setSources(mlPrimaryKeyProperties - .getSources() - .stream() - .map(urn -> { - final Dataset dataset = new Dataset(); - dataset.setUrn(urn.toString()); - return dataset; - }) + result.setDescription(mlPrimaryKeyProperties.getDescription()); + if (mlPrimaryKeyProperties.getDataType() != null) { + result.setDataType( + MLFeatureDataType.valueOf(mlPrimaryKeyProperties.getDataType().toString())); + } + if 
(mlPrimaryKeyProperties.getVersion() != null) { + result.setVersion(VersionTagMapper.map(mlPrimaryKeyProperties.getVersion())); + } + result.setSources( + mlPrimaryKeyProperties.getSources().stream() + .map( + urn -> { + final Dataset dataset = new Dataset(); + dataset.setUrn(urn.toString()); + return dataset; + }) .collect(Collectors.toList())); - return result; - } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java index 05b34ba3acb9c3..76fa8c84e95710 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/MetricsMapper.java @@ -2,22 +2,21 @@ import com.linkedin.datahub.graphql.generated.Metrics; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; public class MetricsMapper implements ModelMapper { - public static final MetricsMapper INSTANCE = new MetricsMapper(); + public static final MetricsMapper INSTANCE = new MetricsMapper(); - public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - return INSTANCE.apply(metrics); - } + public static Metrics map(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + return INSTANCE.apply(metrics); + } - @Override - public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { - final Metrics result = new Metrics(); - result.setDecisionThreshold(metrics.getDecisionThreshold()); - result.setPerformanceMeasures(metrics.getPerformanceMeasures()); - return result; - } + @Override + public Metrics apply(@NonNull final com.linkedin.ml.metadata.Metrics metrics) { + final Metrics result = new Metrics(); + result.setDecisionThreshold(metrics.getDecisionThreshold()); + 
result.setPerformanceMeasures(metrics.getPerformanceMeasures()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java index 8bd25a44745790..e46cb0a074bd7b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/QuantitativeAnalysesMapper.java @@ -2,22 +2,25 @@ import com.linkedin.datahub.graphql.generated.QuantitativeAnalyses; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class QuantitativeAnalysesMapper implements ModelMapper { +public class QuantitativeAnalysesMapper + implements ModelMapper { - public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); + public static final QuantitativeAnalysesMapper INSTANCE = new QuantitativeAnalysesMapper(); - public static QuantitativeAnalyses map(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - return INSTANCE.apply(quantitativeAnalyses); - } + public static QuantitativeAnalyses map( + @NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + return INSTANCE.apply(quantitativeAnalyses); + } - @Override - public QuantitativeAnalyses apply(@NonNull final com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { - final QuantitativeAnalyses result = new QuantitativeAnalyses(); - result.setIntersectionalResults(ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); - result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); - return result; - } + @Override + public QuantitativeAnalyses apply( + @NonNull final 
com.linkedin.ml.metadata.QuantitativeAnalyses quantitativeAnalyses) { + final QuantitativeAnalyses result = new QuantitativeAnalyses(); + result.setIntersectionalResults( + ResultsTypeMapper.map(quantitativeAnalyses.getIntersectionalResults())); + result.setUnitaryResults(ResultsTypeMapper.map(quantitativeAnalyses.getUnitaryResults())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java index 78292f08f8cade..4b6529c59db3e8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/ResultsTypeMapper.java @@ -3,25 +3,25 @@ import com.linkedin.datahub.graphql.generated.ResultsType; import com.linkedin.datahub.graphql.generated.StringBox; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import lombok.NonNull; -public class ResultsTypeMapper implements ModelMapper { +public class ResultsTypeMapper + implements ModelMapper { - public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); + public static final ResultsTypeMapper INSTANCE = new ResultsTypeMapper(); - public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - return INSTANCE.apply(input); - } + public static ResultsType map(@NonNull final com.linkedin.ml.metadata.ResultsType input) { + return INSTANCE.apply(input); + } - @Override - public ResultsType apply(@NonNull final com.linkedin.ml.metadata.ResultsType input) { - final ResultsType result; - if (input.isString()) { - result = new StringBox(input.getString()); - } else { - throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); - } - return result; + @Override + public ResultsType apply(@NonNull final 
com.linkedin.ml.metadata.ResultsType input) { + final ResultsType result; + if (input.isString()) { + result = new StringBox(input.getString()); + } else { + throw new RuntimeException("Type is not one of the Union Types, Type:" + input.toString()); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java index 79dbd2cded4c25..b6bd5efdc42175 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/SourceCodeUrlMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.mlmodel.mappers; -import javax.annotation.Nonnull; - import com.linkedin.datahub.graphql.generated.SourceCodeUrl; import com.linkedin.datahub.graphql.generated.SourceCodeUrlType; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import javax.annotation.Nonnull; -public class SourceCodeUrlMapper implements ModelMapper { - public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); +public class SourceCodeUrlMapper + implements ModelMapper { + public static final SourceCodeUrlMapper INSTANCE = new SourceCodeUrlMapper(); - public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - return INSTANCE.apply(input); - } + public static SourceCodeUrl map(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + return INSTANCE.apply(input); + } - @Override - public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { - final SourceCodeUrl results = new SourceCodeUrl(); - results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); - results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); - return results; - } + 
@Override + public SourceCodeUrl apply(@Nonnull final com.linkedin.ml.metadata.SourceCodeUrl input) { + final SourceCodeUrl results = new SourceCodeUrl(); + results.setType(SourceCodeUrlType.valueOf(input.getType().toString())); + results.setSourceCodeUrl(input.getSourceCodeUrl().toString()); + return results; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java index 6ad0945b0621fd..5758a52538c1e5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/mlmodel/mappers/VersionTagMapper.java @@ -2,20 +2,22 @@ import com.linkedin.common.VersionTag; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class VersionTagMapper implements ModelMapper { - public static final VersionTagMapper INSTANCE = new VersionTagMapper(); +public class VersionTagMapper + implements ModelMapper { + public static final VersionTagMapper INSTANCE = new VersionTagMapper(); - public static com.linkedin.datahub.graphql.generated.VersionTag map(@Nonnull final VersionTag versionTag) { - return INSTANCE.apply(versionTag); - } + public static com.linkedin.datahub.graphql.generated.VersionTag map( + @Nonnull final VersionTag versionTag) { + return INSTANCE.apply(versionTag); + } - @Override - public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { - final com.linkedin.datahub.graphql.generated.VersionTag result = new com.linkedin.datahub.graphql.generated.VersionTag(); - result.setVersionTag(input.getVersionTag()); - return result; - } + @Override + public com.linkedin.datahub.graphql.generated.VersionTag apply(@Nonnull final VersionTag input) { + final 
com.linkedin.datahub.graphql.generated.VersionTag result = + new com.linkedin.datahub.graphql.generated.VersionTag(); + result.setVersionTag(input.getVersionTag()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index 080cdeba09f19e..b6990c3816b53f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.types.notebook; +import static com.linkedin.datahub.graphql.Constants.*; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.CorpuserUrn; @@ -9,8 +14,6 @@ import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.BrowsePath; @@ -18,25 +21,25 @@ import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FacetFilterInput; -import com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.NotebookUpdateInput; +import 
com.linkedin.datahub.graphql.generated.SearchResults; import com.linkedin.datahub.graphql.types.BrowsableEntityType; import com.linkedin.datahub.graphql.types.MutableType; import com.linkedin.datahub.graphql.types.SearchableEntityType; +import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowsePathsMapper; import com.linkedin.datahub.graphql.types.mappers.BrowseResultMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookMapper; import com.linkedin.datahub.graphql.types.notebook.mappers.NotebookUpdateInputMapper; -import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.browse.BrowseResult; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; @@ -53,25 +56,25 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.datahub.graphql.Constants.*; -import static com.linkedin.metadata.Constants.*; - -public class NotebookType implements SearchableEntityType, BrowsableEntityType, - MutableType { - static final Set ASPECTS_TO_RESOLVE = ImmutableSet.of( - NOTEBOOK_KEY_ASPECT_NAME, - NOTEBOOK_INFO_ASPECT_NAME, - NOTEBOOK_CONTENT_ASPECT_NAME, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - OWNERSHIP_ASPECT_NAME, - STATUS_ASPECT_NAME, - GLOBAL_TAGS_ASPECT_NAME, - GLOSSARY_TERMS_ASPECT_NAME, - INSTITUTIONAL_MEMORY_ASPECT_NAME, - DOMAINS_ASPECT_NAME, - SUB_TYPES_ASPECT_NAME, - 
DATA_PLATFORM_INSTANCE_ASPECT_NAME, - BROWSE_PATHS_V2_ASPECT_NAME); +public class NotebookType + implements SearchableEntityType, + BrowsableEntityType, + MutableType { + static final Set ASPECTS_TO_RESOLVE = + ImmutableSet.of( + NOTEBOOK_KEY_ASPECT_NAME, + NOTEBOOK_INFO_ASPECT_NAME, + NOTEBOOK_CONTENT_ASPECT_NAME, + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, + OWNERSHIP_ASPECT_NAME, + STATUS_ASPECT_NAME, + GLOBAL_TAGS_ASPECT_NAME, + GLOSSARY_TERMS_ASPECT_NAME, + INSTITUTIONAL_MEMORY_ASPECT_NAME, + DOMAINS_ASPECT_NAME, + SUB_TYPES_ASPECT_NAME, + DATA_PLATFORM_INSTANCE_ASPECT_NAME, + BROWSE_PATHS_V2_ASPECT_NAME); private final EntityClient _entityClient; @@ -80,44 +83,68 @@ public NotebookType(EntityClient entityClient) { } @Override - public SearchResults search(@Nonnull String query, + public SearchResults search( + @Nonnull String query, @Nullable List filters, int start, int count, - @Nonnull final QueryContext context) throws Exception { + @Nonnull final QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map facetFilters = Collections.emptyMap(); - final SearchResult searchResult = _entityClient.search(NOTEBOOK_ENTITY_NAME, query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); + final SearchResult searchResult = + _entityClient.search( + NOTEBOOK_ENTITY_NAME, + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); return UrnSearchResultsMapper.map(searchResult); } @Override - public AutoCompleteResults autoComplete(@Nonnull String query, + public AutoCompleteResults autoComplete( + @Nonnull String query, @Nullable String field, @Nullable Filter filters, int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(NOTEBOOK_ENTITY_NAME, query, filters, limit, 
context.getAuthentication()); + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + NOTEBOOK_ENTITY_NAME, query, filters, limit, context.getAuthentication()); return AutoCompleteResultsMapper.map(result); } @Override - public BrowseResults browse(@Nonnull List path, @Nullable List filters, int start, - int count, @Nonnull QueryContext context) throws Exception { + public BrowseResults browse( + @Nonnull List path, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { // Put empty map here according to // https://datahubspace.slack.com/archives/C029A3M079U/p1646288772126639 final Map facetFilters = Collections.emptyMap(); - final String pathStr = path.size() > 0 ? BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; - final BrowseResult result = _entityClient.browse(NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); + final String pathStr = + path.size() > 0 ? 
BROWSE_PATH_DELIMITER + String.join(BROWSE_PATH_DELIMITER, path) : ""; + final BrowseResult result = + _entityClient.browse( + NOTEBOOK_ENTITY_NAME, pathStr, facetFilters, start, count, context.getAuthentication()); return BrowseResultMapper.map(result); } @Override - public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) throws Exception { - final StringArray result = _entityClient.getBrowsePaths(NotebookUrn.createFromString(urn), context.getAuthentication()); + public List browsePaths(@Nonnull String urn, @Nonnull QueryContext context) + throws Exception { + final StringArray result = + _entityClient.getBrowsePaths( + NotebookUrn.createFromString(urn), context.getAuthentication()); return BrowsePathsMapper.map(result); } @@ -137,22 +164,26 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urnStrs, @Nonnull QueryContext context) - throws Exception { - final List urns = urnStrs.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urnStrs, @Nonnull QueryContext context) throws Exception { + final List urns = urnStrs.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { - final Map notebookMap = _entityClient.batchGetV2(NOTEBOOK_ENTITY_NAME, new HashSet<>(urns), - ASPECTS_TO_RESOLVE, context.getAuthentication()); + final Map notebookMap = + _entityClient.batchGetV2( + NOTEBOOK_ENTITY_NAME, + new HashSet<>(urns), + ASPECTS_TO_RESOLVE, + context.getAuthentication()); return urns.stream() .map(urn -> notebookMap.getOrDefault(urn, null)) - .map(entityResponse -> entityResponse == null - ? null - : DataFetcherResult.newResult() - .data(NotebookMapper.map(entityResponse)) - .build()) + .map( + entityResponse -> + entityResponse == null + ? 
null + : DataFetcherResult.newResult() + .data(NotebookMapper.map(entityResponse)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Notebook", e); @@ -165,13 +196,16 @@ public Class inputClass() { } @Override - public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) + public Notebook update( + @Nonnull String urn, @Nonnull NotebookUpdateInput input, @Nonnull QueryContext context) throws Exception { if (!isAuthorized(urn, input, context)) { - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); } - CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + CorpuserUrn actor = + CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); Collection proposals = NotebookUpdateInputMapper.map(input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); @@ -184,7 +218,8 @@ public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, return load(urn, context).getData(); } - private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { + private boolean isAuthorized( + @Nonnull String urn, @Nonnull NotebookUpdateInput update, @Nonnull QueryContext context) { // Decide whether the current principal should be allowed to update the Dataset. 
final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); return AuthorizationUtils.isAuthorized( @@ -197,9 +232,9 @@ private boolean isAuthorized(@Nonnull String urn, @Nonnull NotebookUpdateInput u private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateInput updateInput) { - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); List specificPrivileges = new ArrayList<>(); if (updateInput.getOwnership() != null) { @@ -211,12 +246,12 @@ private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final NotebookUpdateIn if (updateInput.getTags() != null) { specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_TAGS_PRIVILEGE.getType()); } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); - // If you either have all entity privileges, or have the specific privileges required, you are authorized. - return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. 
+ return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java index 2b937c86c9779e..a263e31b26faf1 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.BrowsePathsV2; import com.linkedin.common.DataPlatformInstance; import com.linkedin.common.GlobalTags; @@ -26,11 +28,11 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.BrowsePathsV2Mapper; import com.linkedin.datahub.graphql.types.common.mappers.ChangeAuditStampsMapper; +import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.DataPlatformInstanceAspectMapper; import com.linkedin.datahub.graphql.types.common.mappers.InstitutionalMemoryMapper; import com.linkedin.datahub.graphql.types.common.mappers.OwnershipMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; -import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; import com.linkedin.datahub.graphql.types.glossary.mappers.GlossaryTermsMapper; @@ -45,8 +47,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - public class 
NotebookMapper implements ModelMapper { public static final NotebookMapper INSTANCE = new NotebookMapper(); @@ -64,41 +64,59 @@ public Notebook apply(EntityResponse response) { EnvelopedAspectMap aspectMap = response.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, convertedNotebook); mappingHelper.mapToResult(NOTEBOOK_KEY_ASPECT_NAME, this::mapNotebookKey); - mappingHelper.mapToResult(NOTEBOOK_INFO_ASPECT_NAME, (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); + mappingHelper.mapToResult( + NOTEBOOK_INFO_ASPECT_NAME, + (entity, dataMap) -> this.mapNotebookInfo(entity, dataMap, entityUrn)); mappingHelper.mapToResult(NOTEBOOK_CONTENT_ASPECT_NAME, this::mapNotebookContent); - mappingHelper.mapToResult(EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (notebook, dataMap) -> notebook.setOwnership( - OwnershipMapper.map(new Ownership(dataMap), entityUrn) - )); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); - mappingHelper.mapToResult(GLOBAL_TAGS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); - mappingHelper.mapToResult(INSTITUTIONAL_MEMORY_ASPECT_NAME, (notebook, dataMap) -> - notebook.setInstitutionalMemory(InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); + mappingHelper.mapToResult( + EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, this::mapEditableNotebookProperties); + mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, + (notebook, dataMap) -> notebook.setStatus(StatusMapper.map(new Status(dataMap)))); + mappingHelper.mapToResult( + GLOBAL_TAGS_ASPECT_NAME, + (notebook, dataMap) -> + 
notebook.setTags(GlobalTagsMapper.map(new GlobalTags(dataMap), entityUrn))); + mappingHelper.mapToResult( + INSTITUTIONAL_MEMORY_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setInstitutionalMemory( + InstitutionalMemoryMapper.map(new InstitutionalMemory(dataMap), entityUrn))); mappingHelper.mapToResult(DOMAINS_ASPECT_NAME, this::mapDomains); mappingHelper.mapToResult(SUB_TYPES_ASPECT_NAME, this::mapSubTypes); - mappingHelper.mapToResult(GLOSSARY_TERMS_ASPECT_NAME, (notebook, dataMap) -> - notebook.setGlossaryTerms(GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); + mappingHelper.mapToResult( + GLOSSARY_TERMS_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setGlossaryTerms( + GlossaryTermsMapper.map(new GlossaryTerms(dataMap), entityUrn))); mappingHelper.mapToResult(DATA_PLATFORM_INSTANCE_ASPECT_NAME, this::mapDataPlatformInstance); - mappingHelper.mapToResult(BROWSE_PATHS_V2_ASPECT_NAME, (notebook, dataMap) -> - notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); + mappingHelper.mapToResult( + BROWSE_PATHS_V2_ASPECT_NAME, + (notebook, dataMap) -> + notebook.setBrowsePathV2(BrowsePathsV2Mapper.map(new BrowsePathsV2(dataMap)))); return mappingHelper.getResult(); } private void mapDataPlatformInstance(Notebook notebook, DataMap dataMap) { DataPlatformInstance dataPlatformInstance = new DataPlatformInstance(dataMap); - notebook.setPlatform(DataPlatform - .builder() - .setType(EntityType.DATA_PLATFORM) - .setUrn(dataPlatformInstance.getPlatform().toString()) - .build()); - notebook.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); + notebook.setPlatform( + DataPlatform.builder() + .setType(EntityType.DATA_PLATFORM) + .setUrn(dataPlatformInstance.getPlatform().toString()) + .build()); + notebook.setDataPlatformInstance( + DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap))); } private void mapSubTypes(Notebook notebook, DataMap dataMap) { SubTypes 
pegasusSubTypes = new SubTypes(dataMap); if (pegasusSubTypes.hasTypeNames()) { - com.linkedin.datahub.graphql.generated.SubTypes subTypes = new com.linkedin.datahub.graphql.generated.SubTypes(); + com.linkedin.datahub.graphql.generated.SubTypes subTypes = + new com.linkedin.datahub.graphql.generated.SubTypes(); subTypes.setTypeNames(pegasusSubTypes.getTypeNames().stream().collect(Collectors.toList())); notebook.setSubTypes(subTypes); } @@ -110,11 +128,14 @@ private void mapNotebookKey(@Nonnull Notebook notebook, @Nonnull DataMap dataMap notebook.setTool(notebookKey.getNotebookTool()); } - private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { - final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = new com.linkedin.notebook.NotebookInfo(dataMap); + private void mapNotebookInfo( + @Nonnull Notebook notebook, @Nonnull DataMap dataMap, Urn entityUrn) { + final com.linkedin.notebook.NotebookInfo gmsNotebookInfo = + new com.linkedin.notebook.NotebookInfo(dataMap); final NotebookInfo notebookInfo = new NotebookInfo(); notebookInfo.setTitle(gmsNotebookInfo.getTitle()); - notebookInfo.setChangeAuditStamps(ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); + notebookInfo.setChangeAuditStamps( + ChangeAuditStampsMapper.map(gmsNotebookInfo.getChangeAuditStamps())); notebookInfo.setDescription(gmsNotebookInfo.getDescription()); if (gmsNotebookInfo.hasExternalUrl()) { @@ -122,40 +143,46 @@ private void mapNotebookInfo(@Nonnull Notebook notebook, @Nonnull DataMap dataMa } if (gmsNotebookInfo.hasCustomProperties()) { - notebookInfo.setCustomProperties(CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); + notebookInfo.setCustomProperties( + CustomPropertiesMapper.map(gmsNotebookInfo.getCustomProperties(), entityUrn)); } notebook.setInfo(notebookInfo); } private void mapNotebookContent(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - com.linkedin.notebook.NotebookContent 
pegasusNotebookContent = new com.linkedin.notebook.NotebookContent(dataMap); + com.linkedin.notebook.NotebookContent pegasusNotebookContent = + new com.linkedin.notebook.NotebookContent(dataMap); NotebookContent notebookContent = new NotebookContent(); notebookContent.setCells(mapNotebookCells(pegasusNotebookContent.getCells())); notebook.setContent(notebookContent); } - private List mapNotebookCells(com.linkedin.notebook.NotebookCellArray pegasusCells) { + private List mapNotebookCells( + com.linkedin.notebook.NotebookCellArray pegasusCells) { return pegasusCells.stream() - .map(pegasusCell -> { - NotebookCell notebookCell = new NotebookCell(); - NotebookCellType cellType = NotebookCellType.valueOf(pegasusCell.getType().toString()); - notebookCell.setType(cellType); - switch (cellType) { - case CHART_CELL: - notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); - break; - case TEXT_CELL: - notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); - break; - case QUERY_CELL: - notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); - break; - default: - throw new DataHubGraphQLException(String.format("Un-supported NotebookCellType: %s", cellType), - DataHubGraphQLErrorCode.SERVER_ERROR); - } - return notebookCell; - }) + .map( + pegasusCell -> { + NotebookCell notebookCell = new NotebookCell(); + NotebookCellType cellType = + NotebookCellType.valueOf(pegasusCell.getType().toString()); + notebookCell.setType(cellType); + switch (cellType) { + case CHART_CELL: + notebookCell.setChartCell(mapChartCell(pegasusCell.getChartCell())); + break; + case TEXT_CELL: + notebookCell.setTextCell(mapTextCell(pegasusCell.getTextCell())); + break; + case QUERY_CELL: + notebookCell.setQueryChell(mapQueryCell(pegasusCell.getQueryCell())); + break; + default: + throw new DataHubGraphQLException( + String.format("Un-supported NotebookCellType: %s", cellType), + DataHubGraphQLErrorCode.SERVER_ERROR); + } + return notebookCell; + }) 
.collect(Collectors.toList()); } @@ -163,7 +190,8 @@ private ChartCell mapChartCell(com.linkedin.notebook.ChartCell pegasusChartCell) ChartCell chartCell = new ChartCell(); chartCell.setCellId(pegasusChartCell.getCellId()); chartCell.setCellTitle(pegasusChartCell.getCellTitle()); - chartCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); + chartCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusChartCell.getChangeAuditStamps())); return chartCell; } @@ -171,7 +199,8 @@ private TextCell mapTextCell(com.linkedin.notebook.TextCell pegasusTextCell) { TextCell textCell = new TextCell(); textCell.setCellId(pegasusTextCell.getCellId()); textCell.setCellTitle(pegasusTextCell.getCellTitle()); - textCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); + textCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusTextCell.getChangeAuditStamps())); textCell.setText(pegasusTextCell.getText()); return textCell; } @@ -180,7 +209,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) QueryCell queryCell = new QueryCell(); queryCell.setCellId(pegasusQueryCell.getCellId()); queryCell.setCellTitle(pegasusQueryCell.getCellTitle()); - queryCell.setChangeAuditStamps(ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); + queryCell.setChangeAuditStamps( + ChangeAuditStampsMapper.map(pegasusQueryCell.getChangeAuditStamps())); queryCell.setRawQuery(pegasusQueryCell.getRawQuery()); if (pegasusQueryCell.hasLastExecuted()) { queryCell.setLastExecuted(AuditStampMapper.map(pegasusQueryCell.getLastExecuted())); @@ -189,7 +219,8 @@ private QueryCell mapQueryCell(com.linkedin.notebook.QueryCell pegasusQueryCell) } private void mapEditableNotebookProperties(@Nonnull Notebook notebook, @Nonnull DataMap dataMap) { - final EditableNotebookProperties editableNotebookProperties = new EditableNotebookProperties(dataMap); + final 
EditableNotebookProperties editableNotebookProperties = + new EditableNotebookProperties(dataMap); final NotebookEditableProperties notebookEditableProperties = new NotebookEditableProperties(); notebookEditableProperties.setDescription(editableNotebookProperties.getDescription()); notebook.setEditableProperties(notebookEditableProperties); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java index 0c3787d630500a..0d6c70e07053fc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/mappers/NotebookUpdateInputMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; import com.linkedin.common.TagAssociationArray; @@ -17,16 +19,13 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - -public class NotebookUpdateInputMapper implements InputModelMapper, - Urn> { +public class NotebookUpdateInputMapper + implements InputModelMapper, Urn> { public static final NotebookUpdateInputMapper INSTANCE = new NotebookUpdateInputMapper(); - public static Collection map(@Nonnull final NotebookUpdateInput notebookUpdateInput, - @Nonnull final Urn actor) { + public static Collection map( + @Nonnull final NotebookUpdateInput notebookUpdateInput, @Nonnull final Urn actor) { return INSTANCE.apply(notebookUpdateInput, actor); } @@ -39,27 +38,32 @@ public Collection apply(NotebookUpdateInput input, Urn a auditStamp.setTime(System.currentTimeMillis()); if (input.getOwnership() != null) { - 
proposals.add(updateMappingHelper.aspectToProposal(OwnershipUpdateMapper.map(input.getOwnership(), actor), - OWNERSHIP_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + OwnershipUpdateMapper.map(input.getOwnership(), actor), OWNERSHIP_ASPECT_NAME)); } if (input.getTags() != null) { final GlobalTags globalTags = new GlobalTags(); - globalTags.setTags(new TagAssociationArray(input.getTags().getTags().stream() - .map(TagAssociationUpdateMapper::map) - .collect(Collectors.toList()))); + globalTags.setTags( + new TagAssociationArray( + input.getTags().getTags().stream() + .map(TagAssociationUpdateMapper::map) + .collect(Collectors.toList()))); proposals.add(updateMappingHelper.aspectToProposal(globalTags, GLOBAL_TAGS_ASPECT_NAME)); } if (input.getEditableProperties() != null) { - final EditableDashboardProperties editableDashboardProperties = new EditableDashboardProperties(); + final EditableDashboardProperties editableDashboardProperties = + new EditableDashboardProperties(); editableDashboardProperties.setDescription(input.getEditableProperties().getDescription()); if (!editableDashboardProperties.hasCreated()) { editableDashboardProperties.setCreated(auditStamp); } editableDashboardProperties.setLastModified(auditStamp); - proposals.add(updateMappingHelper.aspectToProposal(editableDashboardProperties, - EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal( + editableDashboardProperties, EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME)); } return proposals; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java index 79f95ac8439a55..f7ed4c59a805a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipType.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import graphql.execution.DataFetcherResult; @@ -20,14 +22,12 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class OwnershipType implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(OWNERSHIP_TYPE_INFO_ASPECT_NAME, STATUS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +46,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, - @Nonnull QueryContext context) throws Exception { - final List ownershipTypeUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List ownershipTypeUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(OWNERSHIP_TYPE_ENTITY_NAME, new HashSet<>(ownershipTypeUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + OWNERSHIP_TYPE_ENTITY_NAME, + new HashSet<>(ownershipTypeUrns), + 
ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -60,12 +64,16 @@ public List> batchLoad(@Nonnull List gmsResult == null ? null : DataFetcherResult.newResult() - .data(OwnershipTypeMapper.map(gmsResult)) - .build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(OwnershipTypeMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Custom Ownership Types", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java index 37b59b679e3ac3..9eebe95df8d8cc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/ownership/OwnershipTypeMapper.java @@ -1,12 +1,14 @@ package com.linkedin.datahub.graphql.types.ownership; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Status; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.OwnershipTypeInfo; -import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; @@ -14,9 +16,6 @@ import com.linkedin.entity.EnvelopedAspectMap; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class 
OwnershipTypeMapper implements ModelMapper { public static final OwnershipTypeMapper INSTANCE = new OwnershipTypeMapper(); @@ -34,12 +33,14 @@ public OwnershipTypeEntity apply(@Nonnull EntityResponse input) { EnvelopedAspectMap aspectMap = input.getAspects(); MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); mappingHelper.mapToResult(OWNERSHIP_TYPE_INFO_ASPECT_NAME, this::mapOwnershipTypeInfo); - mappingHelper.mapToResult(STATUS_ASPECT_NAME, + mappingHelper.mapToResult( + STATUS_ASPECT_NAME, (dataset, dataMap) -> dataset.setStatus(StatusMapper.map(new Status(dataMap)))); return mappingHelper.getResult(); } - private void mapOwnershipTypeInfo(@Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { + private void mapOwnershipTypeInfo( + @Nonnull OwnershipTypeEntity ownershipTypeEntity, @Nonnull DataMap dataMap) { final com.linkedin.ownership.OwnershipTypeInfo gmsOwnershipTypeInfo = new com.linkedin.ownership.OwnershipTypeInfo(dataMap); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java index 167e1615fc4cc5..318818b8a21408 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -25,9 +27,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubPolicyMapper implements ModelMapper { public static final DataHubPolicyMapper INSTANCE = new DataHubPolicyMapper(); @@ -71,16 +70,20 @@ private 
ActorFilter mapActors(final DataHubActorFilter actorFilter) { // Change here is not executed at the moment - leaving it for the future UrnArray resourceOwnersTypes = actorFilter.getResourceOwnersTypes(); if (resourceOwnersTypes != null) { - result.setResourceOwnersTypes(resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); + result.setResourceOwnersTypes( + resourceOwnersTypes.stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasGroups()) { - result.setGroups(actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); + result.setGroups( + actorFilter.getGroups().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasUsers()) { - result.setUsers(actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); + result.setUsers( + actorFilter.getUsers().stream().map(Urn::toString).collect(Collectors.toList())); } if (actorFilter.hasRoles()) { - result.setRoles(actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); + result.setRoles( + actorFilter.getRoles().stream().map(Urn::toString).collect(Collectors.toList())); } return result; } @@ -102,14 +105,20 @@ private ResourceFilter mapResources(final DataHubResourceFilter resourceFilter) private PolicyMatchFilter mapFilter(final com.linkedin.policy.PolicyMatchFilter filter) { return PolicyMatchFilter.builder() - .setCriteria(filter.getCriteria() - .stream() - .map(criterion -> PolicyMatchCriterion.builder() - .setField(criterion.getField()) - .setValues(criterion.getValues().stream().map(this::mapValue).collect(Collectors.toList())) - .setCondition(PolicyMatchCondition.valueOf(criterion.getCondition().name())) - .build()) - .collect(Collectors.toList())) + .setCriteria( + filter.getCriteria().stream() + .map( + criterion -> + PolicyMatchCriterion.builder() + .setField(criterion.getField()) + .setValues( + criterion.getValues().stream() + .map(this::mapValue) + 
.collect(Collectors.toList())) + .setCondition( + PolicyMatchCondition.valueOf(criterion.getCondition().name())) + .build()) + .collect(Collectors.toList())) .build(); } @@ -117,7 +126,10 @@ private PolicyMatchCriterionValue mapValue(final String value) { try { // If value is urn, set entity field Urn urn = Urn.createFromString(value); - return PolicyMatchCriterionValue.builder().setValue(value).setEntity(UrnToEntityMapper.map(urn)).build(); + return PolicyMatchCriterionValue.builder() + .setValue(value) + .setEntity(UrnToEntityMapper.map(urn)) + .build(); } catch (URISyntaxException e) { // Value is not an urn. Just set value return PolicyMatchCriterionValue.builder().setValue(value).build(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java index 4cec59009af3fe..3dea9046dcf36f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/policy/DataHubPolicyType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.policy; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubPolicyType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubPolicyType + implements com.linkedin.datahub.graphql.types.EntityType { static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_POLICY_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class objectClass() { } @Override - public List> 
batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(POLICY_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + POLICY_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List> batchLoad(@Nonnull List ur gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubPolicyMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubPolicyMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java index 791197c7d47e49..f35111f78a6944 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.post; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,9 +18,6 @@ import com.linkedin.post.PostInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class PostMapper implements ModelMapper { public static final PostMapper 
INSTANCE = new PostMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java index cf77821b1a2808..2bdcda3592608d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; import com.linkedin.data.template.GetMode; @@ -21,9 +23,6 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class QueryMapper implements ModelMapper { public static final QueryMapper INSTANCE = new QueryMapper(); @@ -47,13 +46,15 @@ public QueryEntity apply(@Nonnull final EntityResponse entityResponse) { private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QueryProperties queryProperties = new QueryProperties(dataMap); - com.linkedin.datahub.graphql.generated.QueryProperties res = new com.linkedin.datahub.graphql.generated.QueryProperties(); + com.linkedin.datahub.graphql.generated.QueryProperties res = + new com.linkedin.datahub.graphql.generated.QueryProperties(); // Query Source must be kept in sync. 
res.setSource(QuerySource.valueOf(queryProperties.getSource().toString())); - res.setStatement(new QueryStatement( - queryProperties.getStatement().getValue(), - QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); + res.setStatement( + new QueryStatement( + queryProperties.getStatement().getValue(), + QueryLanguage.valueOf(queryProperties.getStatement().getLanguage().toString()))); res.setName(queryProperties.getName(GetMode.NULL)); res.setDescription(queryProperties.getDescription(GetMode.NULL)); @@ -73,10 +74,10 @@ private void mapQueryProperties(@Nonnull QueryEntity query, @Nonnull DataMap dat @Nonnull private void mapQuerySubjects(@Nonnull QueryEntity query, @Nonnull DataMap dataMap) { QuerySubjects querySubjects = new QuerySubjects(dataMap); - List res = querySubjects.getSubjects() - .stream() - .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) - .collect(Collectors.toList()); + List res = + querySubjects.getSubjects().stream() + .map(sub -> new QuerySubject(createPartialDataset(sub.getEntity()))) + .collect(Collectors.toList()); query.setSubjects(res); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java index c138cd56f20b3f..0c1fd33e381104 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/query/QueryType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.query; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -20,14 +22,11 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class QueryType 
implements com.linkedin.datahub.graphql.types.EntityType { - public static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - QUERY_PROPERTIES_ASPECT_NAME, - QUERY_SUBJECTS_ASPECT_NAME); +public class QueryType + implements com.linkedin.datahub.graphql.types.EntityType { + public static final Set ASPECTS_TO_FETCH = + ImmutableSet.of(QUERY_PROPERTIES_ASPECT_NAME, QUERY_SUBJECTS_ASPECT_NAME); private final EntityClient _entityClient; @Override @@ -46,13 +45,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List viewUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(QUERY_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + QUERY_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -60,11 +62,16 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(QueryMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult() + .data(QueryMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Queries", e); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java index e1762022f4bcbe..db086e682d57c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DataFlowDataJobsRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DataFlowDataJobsRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DataFlowDataJobsRelationshipsMapper implements - ModelMapper { +public class DataFlowDataJobsRelationshipsMapper + implements ModelMapper { - public static final DataFlowDataJobsRelationshipsMapper INSTANCE = new DataFlowDataJobsRelationshipsMapper(); + public static final DataFlowDataJobsRelationshipsMapper INSTANCE = + new DataFlowDataJobsRelationshipsMapper(); - public static DataFlowDataJobsRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static DataFlowDataJobsRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DataFlowDataJobsRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DataFlowDataJobsRelationships result = new 
DataFlowDataJobsRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DataFlowDataJobsRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DataFlowDataJobsRelationships result = new DataFlowDataJobsRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java index 824e1181c58710..4df64c7ecb85ed 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/DownstreamEntityRelationshipsMapper.java @@ -2,26 +2,28 @@ import com.linkedin.datahub.graphql.generated.DownstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class DownstreamEntityRelationshipsMapper implements - ModelMapper { +public class DownstreamEntityRelationshipsMapper + implements ModelMapper { - public static final DownstreamEntityRelationshipsMapper INSTANCE = new DownstreamEntityRelationshipsMapper(); + public static final DownstreamEntityRelationshipsMapper INSTANCE = + new DownstreamEntityRelationshipsMapper(); - public static DownstreamEntityRelationships map( - @Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static 
DownstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public DownstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public DownstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final DownstreamEntityRelationships result = new DownstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java index 58f4f477bc7e6b..e3743804b49080 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/EntityRelationshipLegacyMapper.java @@ -5,28 +5,32 @@ import com.linkedin.datahub.graphql.types.common.mappers.AuditStampMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - import javax.annotation.Nonnull; -public class EntityRelationshipLegacyMapper implements ModelMapper { +public class EntityRelationshipLegacyMapper + implements ModelMapper { - public static final EntityRelationshipLegacyMapper INSTANCE = new EntityRelationshipLegacyMapper(); + public static final 
EntityRelationshipLegacyMapper INSTANCE = + new EntityRelationshipLegacyMapper(); - public static EntityRelationshipLegacy map(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - return INSTANCE.apply(relationship); - } + public static EntityRelationshipLegacy map( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + return INSTANCE.apply(relationship); + } - @Override - public EntityRelationshipLegacy apply(@Nonnull final com.linkedin.common.EntityRelationship relationship) { - final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); + @Override + public EntityRelationshipLegacy apply( + @Nonnull final com.linkedin.common.EntityRelationship relationship) { + final EntityRelationshipLegacy result = new EntityRelationshipLegacy(); - EntityWithRelationships partialLineageEntity = (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); - if (partialLineageEntity != null) { - result.setEntity(partialLineageEntity); - } - if (relationship.hasCreated()) { - result.setCreated(AuditStampMapper.map(relationship.getCreated())); - } - return result; + EntityWithRelationships partialLineageEntity = + (EntityWithRelationships) UrnToEntityMapper.map(relationship.getEntity()); + if (partialLineageEntity != null) { + result.setEntity(partialLineageEntity); + } + if (relationship.hasCreated()) { + result.setCreated(AuditStampMapper.map(relationship.getCreated())); } + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java index 7db5e08c73fc60..832e1bb396b3ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/relationships/mappers/UpstreamEntityRelationshipsMapper.java @@ -2,24 +2,28 @@ import com.linkedin.datahub.graphql.generated.UpstreamEntityRelationships; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class UpstreamEntityRelationshipsMapper implements ModelMapper { +public class UpstreamEntityRelationshipsMapper + implements ModelMapper { - public static final UpstreamEntityRelationshipsMapper INSTANCE = new UpstreamEntityRelationshipsMapper(); + public static final UpstreamEntityRelationshipsMapper INSTANCE = + new UpstreamEntityRelationshipsMapper(); - public static UpstreamEntityRelationships map(@Nonnull final com.linkedin.common.EntityRelationships relationships) { - return INSTANCE.apply(relationships); - } + public static UpstreamEntityRelationships map( + @Nonnull final com.linkedin.common.EntityRelationships relationships) { + return INSTANCE.apply(relationships); + } - @Override - public UpstreamEntityRelationships apply(@Nonnull final com.linkedin.common.EntityRelationships input) { - final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); - result.setEntities(input.getRelationships().stream().map( - EntityRelationshipLegacyMapper::map - ).collect(Collectors.toList())); - return result; - } + @Override + public UpstreamEntityRelationships apply( + @Nonnull final com.linkedin.common.EntityRelationships input) { + final UpstreamEntityRelationships result = new UpstreamEntityRelationships(); + result.setEntities( + input.getRelationships().stream() + .map(EntityRelationshipLegacyMapper::map) + .collect(Collectors.toList())); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java index 8c6496390943bf..95219457701955 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/DataHubRoleType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -21,11 +23,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubRoleType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubRoleType + implements com.linkedin.datahub.graphql.types.EntityType { static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_ROLE_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -45,13 +45,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List roleUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATAHUB_ROLE_ENTITY_NAME, new HashSet<>(roleUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_ROLE_ENTITY_NAME, + new HashSet<>(roleUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -59,8 +62,13 @@ public List> batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? 
null - : DataFetcherResult.newResult().data(DataHubRoleMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubRoleMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Roles", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java index 5ba31a1602780e..7a467886fc0844 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/role/mappers/DataHubRoleMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.role.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataHubRole; import com.linkedin.datahub.graphql.generated.EntityType; @@ -10,9 +12,6 @@ import com.linkedin.policy.DataHubRoleInfo; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - public class DataHubRoleMapper implements ModelMapper { public static final DataHubRoleMapper INSTANCE = new DataHubRoleMapper(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java index 084c4d5033ad0a..d51e0d06c0fdaa 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/RoleType.java @@ -11,9 +11,9 @@ import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.SearchResults; import 
com.linkedin.datahub.graphql.types.SearchableEntityType; -import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.datahub.graphql.types.mappers.AutoCompleteResultsMapper; import com.linkedin.datahub.graphql.types.mappers.UrnSearchResultsMapper; +import com.linkedin.datahub.graphql.types.rolemetadata.mappers.RoleMapper; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -22,9 +22,6 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; @@ -33,88 +30,101 @@ import java.util.Set; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; -public class RoleType implements SearchableEntityType, +public class RoleType + implements SearchableEntityType, com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.ROLE_KEY, - Constants.ROLE_PROPERTIES_ASPECT_NAME, - Constants.ROLE_ACTORS_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = + ImmutableSet.of( + Constants.ROLE_KEY, + Constants.ROLE_PROPERTIES_ASPECT_NAME, + Constants.ROLE_ACTORS_ASPECT_NAME); - private final EntityClient _entityClient; + private final EntityClient _entityClient; - public RoleType(final EntityClient entityClient) { - _entityClient = entityClient; - } + public RoleType(final EntityClient entityClient) { + _entityClient = entityClient; + } - @Override - public EntityType type() { - return EntityType.ROLE; - } + @Override + public EntityType type() { + return EntityType.ROLE; + } - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } + @Override + public Function 
getKeyProvider() { + return Entity::getUrn; + } - @Override - public Class objectClass() { - return Role.class; - } + @Override + public Class objectClass() { + return Role.class; + } - @Override - public List> batchLoad(@Nonnull List urns, - @Nonnull QueryContext context) throws Exception { - final List externalRolesUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + @Override + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List externalRolesUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); - try { - final Map entities = _entityClient.batchGetV2( - Constants.ROLE_ENTITY_NAME, - new HashSet<>(externalRolesUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + try { + final Map entities = + _entityClient.batchGetV2( + Constants.ROLE_ENTITY_NAME, + new HashSet<>(externalRolesUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); - final List gmsResults = new ArrayList<>(); - for (Urn urn : externalRolesUrns) { - gmsResults.add(entities.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(RoleMapper.map(gmsResult)) - .build() - ) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Role", e); - } + final List gmsResults = new ArrayList<>(); + for (Urn urn : externalRolesUrns) { + gmsResults.add(entities.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsResult -> + gmsResult == null + ? 
null + : DataFetcherResult.newResult().data(RoleMapper.map(gmsResult)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Role", e); } + } - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull final QueryContext context) throws Exception { - final SearchResult searchResult = _entityClient.search(Constants.ROLE_ENTITY_NAME, - query, Collections.emptyMap(), start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull final QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete(Constants.ROLE_ENTITY_NAME, - query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - } + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull final QueryContext context) + throws Exception { + final SearchResult searchResult = + _entityClient.search( + Constants.ROLE_ENTITY_NAME, + query, + Collections.emptyMap(), + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull final QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete( + Constants.ROLE_ENTITY_NAME, query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } } diff --git 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java index cabace1a524413..3eb090e4524395 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/AccessMapper.java @@ -1,41 +1,39 @@ package com.linkedin.datahub.graphql.types.rolemetadata.mappers; - import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.Role; import com.linkedin.datahub.graphql.generated.RoleAssociation; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class AccessMapper { - public static final AccessMapper INSTANCE = new AccessMapper(); - - public static com.linkedin.datahub.graphql.generated.Access map( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - return INSTANCE.apply(access, entityUrn); - } - - public com.linkedin.datahub.graphql.generated.Access apply( - @Nonnull final com.linkedin.common.Access access, - @Nonnull final Urn entityUrn) { - com.linkedin.datahub.graphql.generated.Access result = new com.linkedin.datahub.graphql.generated.Access(); - result.setRoles(access.getRoles().stream().map( - association -> this.mapRoleAssociation(association, entityUrn) - ).collect(Collectors.toList())); - return result; - } - - private RoleAssociation mapRoleAssociation(com.linkedin.common.RoleAssociation association, Urn entityUrn) { - RoleAssociation roleAssociation = new RoleAssociation(); - Role role = new Role(); - role.setType(EntityType.ROLE); - role.setUrn(association.getUrn().toString()); - roleAssociation.setRole(role); - roleAssociation.setAssociatedUrn(entityUrn.toString()); - 
return roleAssociation; - } - + public static final AccessMapper INSTANCE = new AccessMapper(); + + public static com.linkedin.datahub.graphql.generated.Access map( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(access, entityUrn); + } + + public com.linkedin.datahub.graphql.generated.Access apply( + @Nonnull final com.linkedin.common.Access access, @Nonnull final Urn entityUrn) { + com.linkedin.datahub.graphql.generated.Access result = + new com.linkedin.datahub.graphql.generated.Access(); + result.setRoles( + access.getRoles().stream() + .map(association -> this.mapRoleAssociation(association, entityUrn)) + .collect(Collectors.toList())); + return result; + } + + private RoleAssociation mapRoleAssociation( + com.linkedin.common.RoleAssociation association, Urn entityUrn) { + RoleAssociation roleAssociation = new RoleAssociation(); + Role role = new Role(); + role.setType(EntityType.ROLE); + role.setUrn(association.getUrn().toString()); + roleAssociation.setRole(role); + roleAssociation.setAssociatedUrn(entityUrn.toString()); + return roleAssociation; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java index 3cb0ec942a4576..df18b7c89fafc0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/rolemetadata/mappers/RoleMapper.java @@ -15,79 +15,77 @@ import com.linkedin.metadata.key.RoleKey; import com.linkedin.role.Actors; import com.linkedin.role.RoleUserArray; - -import javax.annotation.Nonnull; import java.util.List; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class RoleMapper implements ModelMapper { - public static final RoleMapper INSTANCE = new 
RoleMapper(); - - public static Role map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); + public static final RoleMapper INSTANCE = new RoleMapper(); + + public static Role map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { + final RoleProperties propertiesResult = new RoleProperties(); + propertiesResult.setName(e.getName()); + propertiesResult.setDescription(e.getDescription()); + propertiesResult.setType(e.getType()); + propertiesResult.setRequestUrl(e.getRequestUrl()); + + return propertiesResult; + } + + private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { + RoleUser result = new RoleUser(); + CorpUser corpUser = new CorpUser(); + corpUser.setUrn(provisionedUser.getUser().toString()); + result.setUser(corpUser); + return result; + } + + private static Actor mapActor(Actors actors) { + Actor actor = new Actor(); + actor.setUsers(mapRoleUsers(actors.getUsers())); + return actor; + } + + private static List mapRoleUsers(RoleUserArray users) { + if (users == null) { + return null; } + return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + } - private static RoleProperties mapRoleProperties(final com.linkedin.role.RoleProperties e) { - final RoleProperties propertiesResult = new RoleProperties(); - propertiesResult.setName(e.getName()); - propertiesResult.setDescription(e.getDescription()); - propertiesResult.setType(e.getType()); - propertiesResult.setRequestUrl(e.getRequestUrl()); + @Override + public Role apply(EntityResponse input) { - return propertiesResult; - } + final Role result = new Role(); + final Urn entityUrn = input.getUrn(); - private static RoleUser mapCorpUsers(final com.linkedin.role.RoleUser provisionedUser) { - RoleUser result = new RoleUser(); - CorpUser corpUser = new CorpUser(); - 
corpUser.setUrn(provisionedUser.getUser().toString()); - result.setUser(corpUser); - return result; - } + result.setUrn(entityUrn.toString()); + result.setType(EntityType.ROLE); - private static Actor mapActor(Actors actors) { - Actor actor = new Actor(); - actor.setUsers(mapRoleUsers(actors.getUsers())); - return actor; - } + final EnvelopedAspectMap aspects = input.getAspects(); - private static List mapRoleUsers(RoleUserArray users) { - if (users == null) { - return null; - } - return users.stream().map(x -> mapCorpUsers(x)).collect(Collectors.toList()); + final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); + if (roleKeyAspect != null) { + result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); + } + final EnvelopedAspect envelopedPropertiesAspect = + aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); + if (envelopedPropertiesAspect != null) { + result.setProperties( + mapRoleProperties( + new com.linkedin.role.RoleProperties(envelopedPropertiesAspect.getValue().data()))); } - @Override - public Role apply(EntityResponse input) { - - - final Role result = new Role(); - final Urn entityUrn = input.getUrn(); - - result.setUrn(entityUrn.toString()); - result.setType(EntityType.ROLE); - - final EnvelopedAspectMap aspects = input.getAspects(); - - final EnvelopedAspect roleKeyAspect = aspects.get(Constants.ROLE_KEY); - if (roleKeyAspect != null) { - result.setId(new RoleKey(roleKeyAspect.getValue().data()).getId()); - } - final EnvelopedAspect envelopedPropertiesAspect = aspects.get(Constants.ROLE_PROPERTIES_ASPECT_NAME); - if (envelopedPropertiesAspect != null) { - result.setProperties(mapRoleProperties( - new com.linkedin.role.RoleProperties( - envelopedPropertiesAspect.getValue().data())) - ); - } - - final EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); - if (envelopedUsers != null) { - result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); - } - - return result; + final 
EnvelopedAspect envelopedUsers = aspects.get(Constants.ROLE_ACTORS_ASPECT_NAME); + if (envelopedUsers != null) { + result.setActors(mapActor(new Actors(envelopedUsers.getValue().data()))); } + + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java index 748753c4e22b13..b543a40cbac410 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/schemafield/SchemaFieldType.java @@ -8,15 +8,15 @@ import com.linkedin.datahub.graphql.generated.SchemaFieldEntity; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; import graphql.execution.DataFetcherResult; - -import javax.annotation.Nonnull; import java.util.List; import java.util.function.Function; import java.util.stream.Collectors; +import javax.annotation.Nonnull; -public class SchemaFieldType implements com.linkedin.datahub.graphql.types.EntityType { +public class SchemaFieldType + implements com.linkedin.datahub.graphql.types.EntityType { - public SchemaFieldType() { } + public SchemaFieldType() {} @Override public EntityType type() { @@ -34,18 +34,17 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List schemaFieldUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List schemaFieldUrns = + urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); try { return schemaFieldUrns.stream() .map(this::mapSchemaFieldUrn) - .map(schemaFieldEntity -> DataFetcherResult.newResult() - .data(schemaFieldEntity) - .build() - ) + .map( + schemaFieldEntity -> 
+ DataFetcherResult.newResult().data(schemaFieldEntity).build()) .collect(Collectors.toList()); } catch (Exception e) { @@ -66,6 +65,4 @@ private SchemaFieldEntity mapSchemaFieldUrn(Urn urn) { throw new RuntimeException("Failed to load schemaField entity", e); } } - } - diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index f79b23033c9958..c56833cc817eb7 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -1,13 +1,15 @@ package com.linkedin.datahub.graphql.types.tag; +import static com.linkedin.metadata.Constants.*; + +import com.datahub.authorization.ConjunctivePrivilegeGroup; +import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; -import com.datahub.authorization.ConjunctivePrivilegeGroup; -import com.datahub.authorization.DisjunctivePrivilegeGroup; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.AutoCompleteResults; import com.linkedin.datahub.graphql.generated.Entity; @@ -26,8 +28,8 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.AutoCompleteResult; -import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.query.SearchFlags; +import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import 
com.linkedin.r2.RemoteInvocationException; @@ -44,136 +46,150 @@ import javax.annotation.Nonnull; import javax.annotation.Nullable; -import static com.linkedin.metadata.Constants.*; - - -public class TagType implements com.linkedin.datahub.graphql.types.SearchableEntityType, - MutableType { - - private static final Set FACET_FIELDS = Collections.emptySet(); - - private final EntityClient _entityClient; - - public TagType(final EntityClient entityClient) { - _entityClient = entityClient; - } - - @Override - public Class objectClass() { - return Tag.class; +public class TagType + implements com.linkedin.datahub.graphql.types.SearchableEntityType, + MutableType { + + private static final Set FACET_FIELDS = Collections.emptySet(); + + private final EntityClient _entityClient; + + public TagType(final EntityClient entityClient) { + _entityClient = entityClient; + } + + @Override + public Class objectClass() { + return Tag.class; + } + + @Override + public EntityType type() { + return EntityType.TAG; + } + + @Override + public Function getKeyProvider() { + return Entity::getUrn; + } + + @Override + public Class inputClass() { + return TagUpdateInput.class; + } + + @Override + public List> batchLoad( + final List urns, final QueryContext context) { + + final List tagUrns = urns.stream().map(UrnUtils::getUrn).collect(Collectors.toList()); + + try { + final Map tagMap = + _entityClient.batchGetV2( + TAG_ENTITY_NAME, new HashSet<>(tagUrns), null, context.getAuthentication()); + + final List gmsResults = new ArrayList<>(); + for (Urn urn : tagUrns) { + gmsResults.add(tagMap.getOrDefault(urn, null)); + } + return gmsResults.stream() + .map( + gmsTag -> + gmsTag == null + ? 
null + : DataFetcherResult.newResult().data(TagMapper.map(gmsTag)).build()) + .collect(Collectors.toList()); + } catch (Exception e) { + throw new RuntimeException("Failed to batch load Tags", e); } - - @Override - public EntityType type() { - return EntityType.TAG; - } - - @Override - public Function getKeyProvider() { - return Entity::getUrn; - } - - @Override - public Class inputClass() { - return TagUpdateInput.class; - } - - @Override - public List> batchLoad(final List urns, final QueryContext context) { - - final List tagUrns = urns.stream() - .map(UrnUtils::getUrn) - .collect(Collectors.toList()); - - try { - final Map tagMap = _entityClient.batchGetV2(TAG_ENTITY_NAME, new HashSet<>(tagUrns), - null, context.getAuthentication()); - - final List gmsResults = new ArrayList<>(); - for (Urn urn : tagUrns) { - gmsResults.add(tagMap.getOrDefault(urn, null)); - } - return gmsResults.stream() - .map(gmsTag -> gmsTag == null ? null - : DataFetcherResult.newResult() - .data(TagMapper.map(gmsTag)) - .build()) - .collect(Collectors.toList()); - } catch (Exception e) { - throw new RuntimeException("Failed to batch load Tags", e); - } - } - - @Override - public SearchResults search(@Nonnull String query, - @Nullable List filters, - int start, - int count, - @Nonnull QueryContext context) throws Exception { - final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); - final SearchResult searchResult = _entityClient.search("tag", query, facetFilters, start, count, - context.getAuthentication(), new SearchFlags().setFulltext(true)); - return UrnSearchResultsMapper.map(searchResult); - } - - @Override - public AutoCompleteResults autoComplete(@Nonnull String query, - @Nullable String field, - @Nullable Filter filters, - int limit, - @Nonnull QueryContext context) throws Exception { - final AutoCompleteResult result = _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); - return AutoCompleteResultsMapper.map(result); - 
} - - - @Override - public Tag update(@Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) throws Exception { - if (isAuthorized(input, context)) { - final CorpuserUrn actor = CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); - final Collection proposals = TagUpdateInputMapper.map(input, actor); - proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); - try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); - } catch (RemoteInvocationException e) { - throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); - } - - return load(urn, context).getData(); - } - throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + @Override + public SearchResults search( + @Nonnull String query, + @Nullable List filters, + int start, + int count, + @Nonnull QueryContext context) + throws Exception { + final Map facetFilters = ResolverUtils.buildFacetFilters(filters, FACET_FIELDS); + final SearchResult searchResult = + _entityClient.search( + "tag", + query, + facetFilters, + start, + count, + context.getAuthentication(), + new SearchFlags().setFulltext(true)); + return UrnSearchResultsMapper.map(searchResult); + } + + @Override + public AutoCompleteResults autoComplete( + @Nonnull String query, + @Nullable String field, + @Nullable Filter filters, + int limit, + @Nonnull QueryContext context) + throws Exception { + final AutoCompleteResult result = + _entityClient.autoComplete("tag", query, filters, limit, context.getAuthentication()); + return AutoCompleteResultsMapper.map(result); + } + + @Override + public Tag update( + @Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull QueryContext context) + throws Exception { + if (isAuthorized(input, context)) { + final CorpuserUrn actor = + 
CorpuserUrn.createFromString(context.getAuthentication().getActor().toUrnStr()); + final Collection proposals = TagUpdateInputMapper.map(input, actor); + proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); + try { + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); + } catch (RemoteInvocationException e) { + throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); + } + + return load(urn, context).getData(); } - - private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { - // Decide whether the current principal should be allowed to update the Dataset. - final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); - return AuthorizationUtils.isAuthorized( - context.getAuthorizer(), - context.getAuthentication().getActor().toUrnStr(), - PoliciesConfig.TAG_PRIVILEGES.getResourceType(), - update.getUrn(), - orPrivilegeGroups); + throw new AuthorizationException( + "Unauthorized to perform this action. Please contact your DataHub administrator."); + } + + private boolean isAuthorized(@Nonnull TagUpdateInput update, @Nonnull QueryContext context) { + // Decide whether the current principal should be allowed to update the Dataset. 
+ final DisjunctivePrivilegeGroup orPrivilegeGroups = getAuthorizedPrivileges(update); + return AuthorizationUtils.isAuthorized( + context.getAuthorizer(), + context.getAuthentication().getActor().toUrnStr(), + PoliciesConfig.TAG_PRIVILEGES.getResourceType(), + update.getUrn(), + orPrivilegeGroups); + } + + private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { + + final ConjunctivePrivilegeGroup allPrivilegesGroup = + new ConjunctivePrivilegeGroup( + ImmutableList.of(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType())); + + List specificPrivileges = new ArrayList<>(); + if (updateInput.getOwnership() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); } - - private DisjunctivePrivilegeGroup getAuthorizedPrivileges(final TagUpdateInput updateInput) { - - final ConjunctivePrivilegeGroup allPrivilegesGroup = new ConjunctivePrivilegeGroup(ImmutableList.of( - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() - )); - - List specificPrivileges = new ArrayList<>(); - if (updateInput.getOwnership() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_OWNERS_PRIVILEGE.getType()); - } - if (updateInput.getDescription() != null || updateInput.getName() != null) { - specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); - } - final ConjunctivePrivilegeGroup specificPrivilegeGroup = new ConjunctivePrivilegeGroup(specificPrivileges); - - // If you either have all entity privileges, or have the specific privileges required, you are authorized. 
- return new DisjunctivePrivilegeGroup(ImmutableList.of( - allPrivilegesGroup, - specificPrivilegeGroup - )); + if (updateInput.getDescription() != null || updateInput.getName() != null) { + specificPrivileges.add(PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType()); } + final ConjunctivePrivilegeGroup specificPrivilegeGroup = + new ConjunctivePrivilegeGroup(specificPrivileges); + + // If you either have all entity privileges, or have the specific privileges required, you are + // authorized. + return new DisjunctivePrivilegeGroup( + ImmutableList.of(allPrivilegesGroup, specificPrivilegeGroup)); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java index f4d5f0a549a0ed..72665535e59808 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/GlobalTagsMapper.java @@ -4,35 +4,36 @@ import com.linkedin.common.TagAssociation; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.Tag; - -import javax.annotation.Nonnull; import java.util.stream.Collectors; +import javax.annotation.Nonnull; public class GlobalTagsMapper { - public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); + public static final GlobalTagsMapper INSTANCE = new GlobalTagsMapper(); - public static com.linkedin.datahub.graphql.generated.GlobalTags map( - @Nonnull final GlobalTags standardTags, - @Nonnull final Urn entityUrn - ) { - return INSTANCE.apply(standardTags, entityUrn); - } + public static com.linkedin.datahub.graphql.generated.GlobalTags map( + @Nonnull final GlobalTags standardTags, @Nonnull final Urn entityUrn) { + return INSTANCE.apply(standardTags, entityUrn); + } - public com.linkedin.datahub.graphql.generated.GlobalTags 
apply(@Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { - final com.linkedin.datahub.graphql.generated.GlobalTags result = new com.linkedin.datahub.graphql.generated.GlobalTags(); - result.setTags(input.getTags().stream().map(tag -> this.mapTagAssociation(tag, entityUrn)).collect(Collectors.toList())); - return result; - } + public com.linkedin.datahub.graphql.generated.GlobalTags apply( + @Nonnull final GlobalTags input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.GlobalTags result = + new com.linkedin.datahub.graphql.generated.GlobalTags(); + result.setTags( + input.getTags().stream() + .map(tag -> this.mapTagAssociation(tag, entityUrn)) + .collect(Collectors.toList())); + return result; + } - private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( - @Nonnull final TagAssociation input, - @Nonnull final Urn entityUrn - ) { - final com.linkedin.datahub.graphql.generated.TagAssociation result = new com.linkedin.datahub.graphql.generated.TagAssociation(); - final Tag resultTag = new Tag(); - resultTag.setUrn(input.getTag().toString()); - result.setTag(resultTag); - result.setAssociatedUrn(entityUrn.toString()); - return result; - } + private com.linkedin.datahub.graphql.generated.TagAssociation mapTagAssociation( + @Nonnull final TagAssociation input, @Nonnull final Urn entityUrn) { + final com.linkedin.datahub.graphql.generated.TagAssociation result = + new com.linkedin.datahub.graphql.generated.TagAssociation(); + final Tag resultTag = new Tag(); + resultTag.setUrn(input.getTag().toString()); + result.setTag(resultTag); + result.setAssociatedUrn(entityUrn.toString()); + return result; + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java index 775c123070a80d..3792a423760046 100644 --- 
a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagAssociationUpdateMapper.java @@ -4,27 +4,28 @@ import com.linkedin.common.urn.TagUrn; import com.linkedin.datahub.graphql.generated.TagAssociationUpdate; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; - -import javax.annotation.Nonnull; import java.net.URISyntaxException; +import javax.annotation.Nonnull; -public class TagAssociationUpdateMapper implements ModelMapper { +public class TagAssociationUpdateMapper + implements ModelMapper { - public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); + public static final TagAssociationUpdateMapper INSTANCE = new TagAssociationUpdateMapper(); - public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { - return INSTANCE.apply(tagAssociationUpdate); - } + public static TagAssociation map(@Nonnull final TagAssociationUpdate tagAssociationUpdate) { + return INSTANCE.apply(tagAssociationUpdate); + } - public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { - final TagAssociation output = new TagAssociation(); - try { - output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); - } catch (URISyntaxException e) { - throw new RuntimeException(String.format("Failed to update tag with urn %s, invalid urn", - tagAssociationUpdate.getTag().getUrn())); - } - return output; + public TagAssociation apply(final TagAssociationUpdate tagAssociationUpdate) { + final TagAssociation output = new TagAssociation(); + try { + output.setTag(TagUrn.createFromString(tagAssociationUpdate.getTag().getUrn())); + } catch (URISyntaxException e) { + throw new RuntimeException( + String.format( + "Failed to update tag with urn %s, invalid urn", + tagAssociationUpdate.getTag().getUrn())); } - + return output; + } } diff 
--git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java index 43736b412b0045..d6ce24582678d8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.Ownership; import com.linkedin.common.urn.Urn; import com.linkedin.data.DataMap; @@ -16,63 +18,61 @@ import com.linkedin.tag.TagProperties; import javax.annotation.Nonnull; -import static com.linkedin.metadata.Constants.*; - - /** * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. * - * To be replaced by auto-generated mappers implementations + *

To be replaced by auto-generated mappers implementations */ public class TagMapper implements ModelMapper { - public static final TagMapper INSTANCE = new TagMapper(); + public static final TagMapper INSTANCE = new TagMapper(); - public static Tag map(@Nonnull final EntityResponse entityResponse) { - return INSTANCE.apply(entityResponse); - } - - @Override - public Tag apply(@Nonnull final EntityResponse entityResponse) { - final Tag result = new Tag(); - Urn entityUrn = entityResponse.getUrn(); - result.setUrn(entityResponse.getUrn().toString()); - result.setType(EntityType.TAG); + public static Tag map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } - final String legacyName = entityResponse.getUrn().getId(); - result.setName(legacyName); + @Override + public Tag apply(@Nonnull final EntityResponse entityResponse) { + final Tag result = new Tag(); + Urn entityUrn = entityResponse.getUrn(); + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.TAG); - EnvelopedAspectMap aspectMap = entityResponse.getAspects(); - MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); - mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); - mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); - mappingHelper.mapToResult(OWNERSHIP_ASPECT_NAME, (tag, dataMap) -> - tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); + final String legacyName = entityResponse.getUrn().getId(); + result.setName(legacyName); - if (result.getProperties() != null && result.getProperties().getName() == null) { - result.getProperties().setName(legacyName); - } + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(TAG_KEY_ASPECT_NAME, this::mapTagKey); + mappingHelper.mapToResult(TAG_PROPERTIES_ASPECT_NAME, this::mapTagProperties); + 
mappingHelper.mapToResult( + OWNERSHIP_ASPECT_NAME, + (tag, dataMap) -> tag.setOwnership(OwnershipMapper.map(new Ownership(dataMap), entityUrn))); - return mappingHelper.getResult(); + if (result.getProperties() != null && result.getProperties().getName() == null) { + result.getProperties().setName(legacyName); } - private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - TagKey tagKey = new TagKey(dataMap); - tag.setName(tagKey.getName()); - } + return mappingHelper.getResult(); + } + + private void mapTagKey(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + TagKey tagKey = new TagKey(dataMap); + tag.setName(tagKey.getName()); + } - private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { - final TagProperties properties = new TagProperties(dataMap); - final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = - new com.linkedin.datahub.graphql.generated.TagProperties.Builder() - .setColorHex(properties.getColorHex(GetMode.DEFAULT)) - .setName(properties.getName(GetMode.DEFAULT)) - .setDescription(properties.getDescription(GetMode.DEFAULT)) - .build(); - tag.setProperties(graphQlProperties); - // Set deprecated top-level description field. - if (properties.hasDescription()) { - tag.setDescription(properties.getDescription()); - } + private void mapTagProperties(@Nonnull Tag tag, @Nonnull DataMap dataMap) { + final TagProperties properties = new TagProperties(dataMap); + final com.linkedin.datahub.graphql.generated.TagProperties graphQlProperties = + new com.linkedin.datahub.graphql.generated.TagProperties.Builder() + .setColorHex(properties.getColorHex(GetMode.DEFAULT)) + .setName(properties.getName(GetMode.DEFAULT)) + .setDescription(properties.getDescription(GetMode.DEFAULT)) + .build(); + tag.setProperties(graphQlProperties); + // Set deprecated top-level description field. 
+ if (properties.hasDescription()) { + tag.setDescription(properties.getDescription()); } + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java index 505dd0d36954b3..316994881ccfec 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/mappers/TagUpdateInputMapper.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.tag.mappers; +import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; @@ -19,24 +22,19 @@ import java.util.Collection; import javax.annotation.Nonnull; -import static com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils.*; -import static com.linkedin.metadata.Constants.*; - - -public class TagUpdateInputMapper implements InputModelMapper, Urn> { +public class TagUpdateInputMapper + implements InputModelMapper, Urn> { public static final TagUpdateInputMapper INSTANCE = new TagUpdateInputMapper(); public static Collection map( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { return INSTANCE.apply(tagUpdate, actor); } @Override public Collection apply( - @Nonnull final TagUpdateInput tagUpdate, - @Nonnull final Urn actor) { + @Nonnull final TagUpdateInput tagUpdate, @Nonnull final Urn actor) { final Collection proposals = new ArrayList<>(2); final UpdateMappingHelper updateMappingHelper = new UpdateMappingHelper(TAG_ENTITY_NAME); @@ -59,9 +57,10 @@ public Collection apply( TagProperties tagProperties = new TagProperties(); 
tagProperties.setName(tagUpdate.getName()); tagProperties.setDescription(tagUpdate.getDescription()); - proposals.add(updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); + proposals.add( + updateMappingHelper.aspectToProposal(tagProperties, TAG_PROPERTIES_ASPECT_NAME)); } return proposals; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java index ddc9f33b25516f..be67d174219178 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestMapper.java @@ -1,15 +1,14 @@ package com.linkedin.datahub.graphql.types.test; -import com.linkedin.datahub.graphql.generated.TestDefinition; -import com.linkedin.test.TestInfo; import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; +import com.linkedin.datahub.graphql.generated.TestDefinition; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; - +import com.linkedin.test.TestInfo; public class TestMapper { @@ -29,12 +28,11 @@ public static Test map(final EntityResponse entityResponse) { result.setName(testInfo.getName()); result.setDescription(testInfo.getDescription()); result.setDefinition(new TestDefinition(testInfo.getDefinition().getJson())); - } else { + } else { return null; } return result; } - private TestMapper() { - } -} \ No newline at end of file + private TestMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java index 4b7df8a0d23d36..eefcc356c22a3c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/test/TestType.java @@ -3,9 +3,9 @@ import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.datahub.graphql.generated.Entity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Test; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; @@ -20,15 +20,12 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - public class TestType implements com.linkedin.datahub.graphql.types.EntityType { - static final Set ASPECTS_TO_FETCH = ImmutableSet.of( - Constants.TEST_INFO_ASPECT_NAME - ); + static final Set ASPECTS_TO_FETCH = ImmutableSet.of(Constants.TEST_INFO_ASPECT_NAME); private final EntityClient _entityClient; - public TestType(final EntityClient entityClient) { + public TestType(final EntityClient entityClient) { _entityClient = entityClient; } @@ -48,28 +45,28 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) throws Exception { - final List testUrns = urns.stream() - .map(this::getUrn) - .collect(Collectors.toList()); + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { + final List testUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { - final Map entities = _entityClient.batchGetV2( - Constants.TEST_ENTITY_NAME, - new HashSet<>(testUrns), - ASPECTS_TO_FETCH, - context.getAuthentication()); + final Map entities = + 
_entityClient.batchGetV2( + Constants.TEST_ENTITY_NAME, + new HashSet<>(testUrns), + ASPECTS_TO_FETCH, + context.getAuthentication()); final List gmsResults = new ArrayList<>(); for (Urn urn : testUrns) { gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> - gmsResult == null ? null : DataFetcherResult.newResult() - .data(TestMapper.map(gmsResult)) - .build() - ) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult().data(TestMapper.map(gmsResult)).build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Tests", e); @@ -83,4 +80,4 @@ private Urn getUrn(final String urnStr) { throw new RuntimeException(String.format("Failed to convert urn string %s into Urn", urnStr)); } } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java index 7812282d0c1e52..02de39ffc644c0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaBlameMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ChangeOperationType; import com.linkedin.datahub.graphql.generated.GetSchemaBlameResult; @@ -25,15 +27,14 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame structs 
for every schema +// Class for converting ChangeTransactions received from the Timeline API to SchemaFieldBlame +// structs for every schema // at every semantic version. @Slf4j public class SchemaBlameMapper { - public static GetSchemaBlameResult map(@Nonnull final List changeTransactions, + public static GetSchemaBlameResult map( + @Nonnull final List changeTransactions, @Nullable final String versionCutoff) { final GetSchemaBlameResult result = new GetSchemaBlameResult(); if (changeTransactions.isEmpty()) { @@ -46,7 +47,8 @@ public static GetSchemaBlameResult map(@Nonnull final List ch final String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - final String semanticVersionFilterString = versionCutoff == null ? latestSemanticVersionString : versionCutoff; + final String semanticVersionFilterString = + versionCutoff == null ? latestSemanticVersionString : versionCutoff; final Optional semanticVersionFilterOptional = createSemanticVersion(semanticVersionFilterString); if (semanticVersionFilterOptional.isEmpty()) { @@ -55,25 +57,30 @@ public static GetSchemaBlameResult map(@Nonnull final List ch final ComparableVersion semanticVersionFilter = semanticVersionFilterOptional.get(); - final List reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .filter(semanticVersionChangeTransactionPair -> - semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) <= 0) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + final List reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .filter( + semanticVersionChangeTransactionPair -> + 
semanticVersionChangeTransactionPair.getFirst().compareTo(semanticVersionFilter) + <= 0) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); if (reversedChangeTransactions.isEmpty()) { return result; } - final String selectedSemanticVersion = truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); + final String selectedSemanticVersion = + truncateSemanticVersion(reversedChangeTransactions.get(0).getSemVer()); final long selectedSemanticVersionTimestamp = reversedChangeTransactions.get(0).getTimestamp(); final String selectedVersionStamp = reversedChangeTransactions.get(0).getVersionStamp(); result.setVersion( - new SemanticVersionStruct(selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); + new SemanticVersionStruct( + selectedSemanticVersion, selectedSemanticVersionTimestamp, selectedVersionStamp)); for (ChangeTransaction changeTransaction : reversedChangeTransactions) { for (ChangeEvent changeEvent : changeTransaction.getChangeEvents()) { @@ -90,8 +97,10 @@ public static GetSchemaBlameResult map(@Nonnull final List ch SchemaFieldKey schemaFieldKey; try { - schemaFieldKey = (SchemaFieldKey) EntityKeyUtils.convertUrnToEntityKeyInternal(Urn.createFromString(schemaUrn), - new SchemaFieldKey().schema()); + schemaFieldKey = + (SchemaFieldKey) + EntityKeyUtils.convertUrnToEntityKeyInternal( + Urn.createFromString(schemaUrn), new SchemaFieldKey().schema()); } catch (Exception e) { log.debug(String.format("Could not generate schema urn for %s", schemaUrn)); continue; @@ -101,7 +110,10 @@ public static GetSchemaBlameResult map(@Nonnull final List ch schemaFieldBlame.setFieldPath(fieldPath); final SchemaFieldChange schemaFieldChange = - getLastSchemaFieldChange(changeEvent, changeTransaction.getTimestamp(), changeTransaction.getSemVer(), + getLastSchemaFieldChange( + changeEvent, + changeTransaction.getTimestamp(), + changeTransaction.getSemVer(), 
changeTransaction.getVersionStamp()); schemaFieldBlame.setSchemaFieldChange(schemaFieldChange); @@ -109,15 +121,17 @@ public static GetSchemaBlameResult map(@Nonnull final List ch } } - result.setSchemaFieldBlameList(schemaBlameMap.values() - .stream() - .filter(schemaFieldBlame -> !schemaFieldBlame.getSchemaFieldChange() - .getChangeType() - .equals(ChangeOperationType.REMOVE)) - .collect(Collectors.toList())); + result.setSchemaFieldBlameList( + schemaBlameMap.values().stream() + .filter( + schemaFieldBlame -> + !schemaFieldBlame + .getSchemaFieldChange() + .getChangeType() + .equals(ChangeOperationType.REMOVE)) + .collect(Collectors.toList())); return result; } - private SchemaBlameMapper() { - } -} \ No newline at end of file + private SchemaBlameMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java index 249957b1a12621..295ca0856821c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/mappers/SchemaVersionListMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.timeline.mappers; +import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; + import com.linkedin.datahub.graphql.generated.GetSchemaVersionListResult; import com.linkedin.datahub.graphql.generated.SemanticVersionStruct; import com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils; @@ -12,10 +14,8 @@ import java.util.stream.Collectors; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.types.timeline.utils.TimelineUtils.*; - - -// Class for converting ChangeTransactions received from the Timeline API to list of schema versions. 
+// Class for converting ChangeTransactions received from the Timeline API to list of schema +// versions. @Slf4j public class SchemaVersionListMapper { @@ -29,28 +29,36 @@ public static GetSchemaVersionListResult map(List changeTrans String latestSemanticVersionString = truncateSemanticVersion(changeTransactions.get(changeTransactions.size() - 1).getSemVer()); - long latestSemanticVersionTimestamp = changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); - String latestVersionStamp = changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); + long latestSemanticVersionTimestamp = + changeTransactions.get(changeTransactions.size() - 1).getTimestamp(); + String latestVersionStamp = + changeTransactions.get(changeTransactions.size() - 1).getVersionStamp(); result.setLatestVersion( - new SemanticVersionStruct(latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); + new SemanticVersionStruct( + latestSemanticVersionString, latestSemanticVersionTimestamp, latestVersionStamp)); - List reversedChangeTransactions = changeTransactions.stream() - .map(TimelineUtils::semanticVersionChangeTransactionPair) - .filter(Optional::isPresent) - .map(Optional::get) - .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) - .map(Pair::getSecond) - .collect(Collectors.toList()); + List reversedChangeTransactions = + changeTransactions.stream() + .map(TimelineUtils::semanticVersionChangeTransactionPair) + .filter(Optional::isPresent) + .map(Optional::get) + .sorted(Collections.reverseOrder(Comparator.comparing(Pair::getFirst))) + .map(Pair::getSecond) + .collect(Collectors.toList()); - List semanticVersionStructList = reversedChangeTransactions.stream() - .map(changeTransaction -> new SemanticVersionStruct(truncateSemanticVersion(changeTransaction.getSemVer()), - changeTransaction.getTimestamp(), changeTransaction.getVersionStamp())) - .collect(Collectors.toList()); + List semanticVersionStructList = + 
reversedChangeTransactions.stream() + .map( + changeTransaction -> + new SemanticVersionStruct( + truncateSemanticVersion(changeTransaction.getSemVer()), + changeTransaction.getTimestamp(), + changeTransaction.getVersionStamp())) + .collect(Collectors.toList()); result.setSemanticVersionList(semanticVersionStructList); return result; } - private SchemaVersionListMapper() { - } -} \ No newline at end of file + private SchemaVersionListMapper() {} +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java index 175cf678117f01..37acfe3da0f9f0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/timeline/utils/TimelineUtils.java @@ -9,13 +9,13 @@ import lombok.extern.slf4j.Slf4j; import org.apache.maven.artifact.versioning.ComparableVersion; - @Slf4j public class TimelineUtils { - public static Optional> semanticVersionChangeTransactionPair( - ChangeTransaction changeTransaction) { - Optional semanticVersion = createSemanticVersion(changeTransaction.getSemVer()); + public static Optional> + semanticVersionChangeTransactionPair(ChangeTransaction changeTransaction) { + Optional semanticVersion = + createSemanticVersion(changeTransaction.getSemVer()); return semanticVersion.map(version -> Pair.of(version, changeTransaction)); } @@ -29,21 +29,24 @@ public static Optional createSemanticVersion(String semanticV } } - // The SemanticVersion is currently returned from the ChangeTransactions in the format "x.y.z-computed". This function + // The SemanticVersion is currently returned from the ChangeTransactions in the format + // "x.y.z-computed". This function // removes the suffix "computed". 
public static String truncateSemanticVersion(String semanticVersion) { String suffix = "-computed"; - return semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + return semanticVersion.endsWith(suffix) + ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; } - public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent, long timestamp, - String semanticVersion, String versionStamp) { + public static SchemaFieldChange getLastSchemaFieldChange( + ChangeEvent changeEvent, long timestamp, String semanticVersion, String versionStamp) { SchemaFieldChange schemaFieldChange = new SchemaFieldChange(); schemaFieldChange.setTimestampMillis(timestamp); schemaFieldChange.setLastSemanticVersion(truncateSemanticVersion(semanticVersion)); schemaFieldChange.setChangeType( - ChangeOperationType.valueOf(ChangeOperationType.class, changeEvent.getOperation().toString())); + ChangeOperationType.valueOf( + ChangeOperationType.class, changeEvent.getOperation().toString())); schemaFieldChange.setVersionStamp(versionStamp); String translatedChangeOperationType; @@ -65,15 +68,16 @@ public static SchemaFieldChange getLastSchemaFieldChange(ChangeEvent changeEvent String suffix = "-computed"; String translatedSemanticVersion = - semanticVersion.endsWith(suffix) ? semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) + semanticVersion.endsWith(suffix) + ? 
semanticVersion.substring(0, semanticVersion.lastIndexOf(suffix)) : semanticVersion; - String lastSchemaFieldChange = String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); + String lastSchemaFieldChange = + String.format("%s in v%s", translatedChangeOperationType, translatedSemanticVersion); schemaFieldChange.setLastSchemaFieldChange(lastSchemaFieldChange); return schemaFieldChange; } - private TimelineUtils() { - } + private TimelineUtils() {} } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java index 3bf84d21a32158..e4e67c86f1ae6b 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/FieldUsageCountsMapper.java @@ -4,12 +4,13 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class FieldUsageCountsMapper implements ModelMapper { +public class FieldUsageCountsMapper + implements ModelMapper { public static final FieldUsageCountsMapper INSTANCE = new FieldUsageCountsMapper(); - public static FieldUsageCounts map(@Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { + public static FieldUsageCounts map( + @Nonnull final com.linkedin.usage.FieldUsageCounts usageCounts) { return INSTANCE.apply(usageCounts); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java index 453ae97d403067..3449c6782a46ba 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMapper.java @@ -5,18 +5,19 @@ import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class UsageAggregationMapper implements - ModelMapper { +public class UsageAggregationMapper + implements ModelMapper { public static final UsageAggregationMapper INSTANCE = new UsageAggregationMapper(); - public static UsageAggregation map(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public static UsageAggregation map( + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { return INSTANCE.apply(pdlUsageAggregation); } @Override - public UsageAggregation apply(@Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { + public UsageAggregation apply( + @Nonnull final com.linkedin.usage.UsageAggregation pdlUsageAggregation) { UsageAggregation result = new UsageAggregation(); result.setBucket(pdlUsageAggregation.getBucket()); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java index 697b15d57e4e48..ff9f6fd5c48551 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageAggregationMetricsMapper.java @@ -5,31 +5,34 @@ import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class UsageAggregationMetricsMapper implements - ModelMapper { +public class UsageAggregationMetricsMapper + implements ModelMapper { public static final UsageAggregationMetricsMapper INSTANCE = new UsageAggregationMetricsMapper(); - public static UsageAggregationMetrics map(@Nonnull final com.linkedin.usage.UsageAggregationMetrics 
usageAggregationMetrics) { + public static UsageAggregationMetrics map( + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { return INSTANCE.apply(usageAggregationMetrics); } @Override - public UsageAggregationMetrics apply(@Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { + public UsageAggregationMetrics apply( + @Nonnull final com.linkedin.usage.UsageAggregationMetrics usageAggregationMetrics) { UsageAggregationMetrics result = new UsageAggregationMetrics(); result.setTotalSqlQueries(usageAggregationMetrics.getTotalSqlQueries()); result.setUniqueUserCount(usageAggregationMetrics.getUniqueUserCount()); result.setTopSqlQueries(usageAggregationMetrics.getTopSqlQueries()); if (usageAggregationMetrics.hasFields()) { result.setFields( - usageAggregationMetrics.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + usageAggregationMetrics.getFields().stream() + .map(FieldUsageCountsMapper::map) + .collect(Collectors.toList())); } if (usageAggregationMetrics.hasUsers()) { - result.setUsers(usageAggregationMetrics.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + usageAggregationMetrics.getUsers().stream() + .map(aggregation -> UserUsageCountsMapper.map(aggregation)) + .collect(Collectors.toList())); } return result; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java index ba3b86b72af8b9..63fe051b7ede96 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultAggregationMapper.java @@ -5,30 +5,35 @@ import 
java.util.stream.Collectors; import javax.annotation.Nonnull; +public class UsageQueryResultAggregationMapper + implements ModelMapper< + com.linkedin.usage.UsageQueryResultAggregations, UsageQueryResultAggregations> { -public class UsageQueryResultAggregationMapper implements - ModelMapper { + public static final UsageQueryResultAggregationMapper INSTANCE = + new UsageQueryResultAggregationMapper(); - public static final UsageQueryResultAggregationMapper INSTANCE = new UsageQueryResultAggregationMapper(); - - public static UsageQueryResultAggregations map(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public static UsageQueryResultAggregations map( + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { return INSTANCE.apply(pdlUsageResultAggregations); } @Override - public UsageQueryResultAggregations apply(@Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { + public UsageQueryResultAggregations apply( + @Nonnull final com.linkedin.usage.UsageQueryResultAggregations pdlUsageResultAggregations) { UsageQueryResultAggregations result = new UsageQueryResultAggregations(); result.setTotalSqlQueries(pdlUsageResultAggregations.getTotalSqlQueries()); result.setUniqueUserCount(pdlUsageResultAggregations.getUniqueUserCount()); if (pdlUsageResultAggregations.hasFields()) { result.setFields( - pdlUsageResultAggregations.getFields().stream().map(FieldUsageCountsMapper::map).collect(Collectors.toList())); + pdlUsageResultAggregations.getFields().stream() + .map(FieldUsageCountsMapper::map) + .collect(Collectors.toList())); } if (pdlUsageResultAggregations.hasUsers()) { - result.setUsers(pdlUsageResultAggregations.getUsers() - .stream() - .map(aggregation -> UserUsageCountsMapper.map(aggregation)) - .collect(Collectors.toList())); + result.setUsers( + pdlUsageResultAggregations.getUsers().stream() + .map(aggregation -> 
UserUsageCountsMapper.map(aggregation)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java index f54259180c7392..444605cd99377a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UsageQueryResultMapper.java @@ -1,17 +1,17 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.UsageQueryResult; - import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import java.util.stream.Collectors; import javax.annotation.Nonnull; - -public class UsageQueryResultMapper implements ModelMapper { +public class UsageQueryResultMapper + implements ModelMapper { public static final UsageQueryResultMapper INSTANCE = new UsageQueryResultMapper(); - public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { + public static UsageQueryResult map( + @Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { return INSTANCE.apply(pdlUsageResult); } @@ -19,11 +19,14 @@ public static UsageQueryResult map(@Nonnull final com.linkedin.usage.UsageQueryR public UsageQueryResult apply(@Nonnull final com.linkedin.usage.UsageQueryResult pdlUsageResult) { UsageQueryResult result = new UsageQueryResult(); if (pdlUsageResult.hasAggregations()) { - result.setAggregations(UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); + result.setAggregations( + UsageQueryResultAggregationMapper.map(pdlUsageResult.getAggregations())); } if (pdlUsageResult.hasBuckets()) { - result.setBuckets(pdlUsageResult.getBuckets().stream().map( - bucket -> UsageAggregationMapper.map(bucket)).collect(Collectors.toList())); + 
result.setBuckets( + pdlUsageResult.getBuckets().stream() + .map(bucket -> UsageAggregationMapper.map(bucket)) + .collect(Collectors.toList())); } return result; } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java index b525a761841e30..014003dd865540 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/usage/UserUsageCountsMapper.java @@ -1,23 +1,23 @@ package com.linkedin.datahub.graphql.types.usage; import com.linkedin.datahub.graphql.generated.CorpUser; - import com.linkedin.datahub.graphql.generated.UserUsageCounts; import com.linkedin.datahub.graphql.types.mappers.ModelMapper; import javax.annotation.Nonnull; - -public class UserUsageCountsMapper implements - ModelMapper { +public class UserUsageCountsMapper + implements ModelMapper { public static final UserUsageCountsMapper INSTANCE = new UserUsageCountsMapper(); - public static UserUsageCounts map(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public static UserUsageCounts map( + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { return INSTANCE.apply(pdlUsageResultAggregations); } @Override - public UserUsageCounts apply(@Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { + public UserUsageCounts apply( + @Nonnull final com.linkedin.usage.UserUsageCounts pdlUsageResultAggregations) { UserUsageCounts result = new UserUsageCounts(); if (pdlUsageResultAggregations.hasUser()) { CorpUser partialUser = new CorpUser(); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java index f6c348937c7a55..8ea06f46d51339 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewMapper.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.data.DataMap; import com.linkedin.datahub.graphql.generated.DataHubView; import com.linkedin.datahub.graphql.generated.DataHubViewDefinition; @@ -24,8 +26,6 @@ import javax.annotation.Nullable; import lombok.extern.slf4j.Slf4j; -import static com.linkedin.metadata.Constants.*; - @Slf4j public class DataHubViewMapper implements ModelMapper { @@ -57,20 +57,26 @@ private void mapDataHubViewInfo(@Nonnull final DataHubView view, @Nonnull final } @Nonnull - private DataHubViewDefinition mapViewDefinition(@Nonnull final com.linkedin.view.DataHubViewDefinition definition) { + private DataHubViewDefinition mapViewDefinition( + @Nonnull final com.linkedin.view.DataHubViewDefinition definition) { final DataHubViewDefinition result = new DataHubViewDefinition(); result.setFilter(mapFilter(definition.getFilter())); - result.setEntityTypes(definition.getEntityTypes().stream().map(EntityTypeMapper::getType).collect( - Collectors.toList())); + result.setEntityTypes( + definition.getEntityTypes().stream() + .map(EntityTypeMapper::getType) + .collect(Collectors.toList())); return result; } @Nullable - private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.filter.Filter filter) { - // This assumes that people DO NOT emit Views on their own, since we expect that the Filter structure is within + private DataHubViewFilter mapFilter( + @Nonnull final com.linkedin.metadata.query.filter.Filter filter) { + // This assumes that people DO NOT emit Views on their own, since we expect that 
the Filter + // structure is within // a finite set of possibilities. // - // If we find a View that was ingested manually and malformed, then we log that and return a default. + // If we find a View that was ingested manually and malformed, then we log that and return a + // default. final DataHubViewFilter result = new DataHubViewFilter(); if (filter.hasOr() && filter.getOr().size() == 1) { // Then we are looking at an AND with multiple sub conditions. @@ -84,9 +90,7 @@ private DataHubViewFilter mapFilter(@Nonnull final com.linkedin.metadata.query.f return result; } - /** - * This simply converts a List of leaf criterion into the FacetFiler equivalent. - */ + /** This simply converts a List of leaf criterion into the FacetFiler equivalent. */ @Nonnull private List mapAndFilters(@Nullable final List ands) { // If the array is missing, return empty array. @@ -98,9 +102,9 @@ private List mapAndFilters(@Nullable final List ands) { } /** - * This converts a list of Conjunctive Criterion into a flattened list - * of FacetFilters. This method makes the assumption that WE (our GraphQL API) - * has minted the View and that each or criterion contains at maximum one nested condition. + * This converts a list of Conjunctive Criterion into a flattened list of FacetFilters. This + * method makes the assumption that WE (our GraphQL API) has minted the View and that each or + * criterion contains at maximum one nested condition. */ @Nonnull private List mapOrFilters(@Nullable final List ors) { @@ -109,8 +113,10 @@ private List mapOrFilters(@Nullable final List or.hasAnd() && or.getAnd().size() > 1)) { - log.warn(String.format( - "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. %s", ors)); + log.warn( + String.format( + "Detected a View with a malformed filter clause. OR view has children conjunctions with more than one Criterion. Returning empty filters. 
%s", + ors)); return Collections.emptyList(); } // It is assumed that in this case, the view is a flat list of ORs. Thus, we filter diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java index 21a80e3f900d41..9b3680bde9b2b2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/view/DataHubViewType.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.view; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -20,11 +22,9 @@ import javax.annotation.Nonnull; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor -public class DataHubViewType implements com.linkedin.datahub.graphql.types.EntityType { +public class DataHubViewType + implements com.linkedin.datahub.graphql.types.EntityType { public static final Set ASPECTS_TO_FETCH = ImmutableSet.of(DATAHUB_VIEW_INFO_ASPECT_NAME); private final EntityClient _entityClient; @@ -44,13 +44,16 @@ public Class objectClass() { } @Override - public List> batchLoad(@Nonnull List urns, @Nonnull QueryContext context) - throws Exception { + public List> batchLoad( + @Nonnull List urns, @Nonnull QueryContext context) throws Exception { final List viewUrns = urns.stream().map(this::getUrn).collect(Collectors.toList()); try { final Map entities = - _entityClient.batchGetV2(DATAHUB_VIEW_ENTITY_NAME, new HashSet<>(viewUrns), ASPECTS_TO_FETCH, + _entityClient.batchGetV2( + DATAHUB_VIEW_ENTITY_NAME, + new HashSet<>(viewUrns), + ASPECTS_TO_FETCH, context.getAuthentication()); final List gmsResults = new ArrayList<>(); @@ -58,8 +61,13 @@ public List> 
batchLoad(@Nonnull List urns gmsResults.add(entities.getOrDefault(urn, null)); } return gmsResults.stream() - .map(gmsResult -> gmsResult == null ? null - : DataFetcherResult.newResult().data(DataHubViewMapper.map(gmsResult)).build()) + .map( + gmsResult -> + gmsResult == null + ? null + : DataFetcherResult.newResult() + .data(DataHubViewMapper.map(gmsResult)) + .build()) .collect(Collectors.toList()); } catch (Exception e) { throw new RuntimeException("Failed to batch load Views", e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java index bb9de5fb96802b..677ad8afbaca31 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/DateUtil.java @@ -5,39 +5,34 @@ import org.joda.time.DateTimeConstants; public class DateUtil { - public DateTime getNow() { - return DateTime.now(); - } - - public DateTime getStartOfNextWeek() { - return setTimeToZero(getNow() - .withDayOfWeek(DateTimeConstants.SUNDAY) - .plusDays(1)); - } - - public DateTime getStartOfNextMonth() { - return setTimeToZero(getNow() - .withDayOfMonth(1) - .plusMonths(1)); - } - - public DateTime setTimeToZero(DateTime input) { - return input.withHourOfDay(0) - .withMinuteOfHour(0) - .withSecondOfMinute(0) - .withMillisOfDay(0); - } - - public DateTime getTomorrowStart() { - return setTimeToZero(getNow().plusDays(1)); - } - - public DateRange getTrailingWeekDateRange() { - final DateTime todayEnd = getTomorrowStart().minusMillis(1); - final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); - return new DateRange( - String.valueOf(aWeekAgoStart.getMillis()), - String.valueOf(todayEnd.getMillis()) - ); - } + public DateTime getNow() { + return DateTime.now(); + } + + public DateTime getStartOfNextWeek() { + return 
setTimeToZero(getNow().withDayOfWeek(DateTimeConstants.SUNDAY).plusDays(1)); + } + + public DateTime getStartOfThisMonth() { + return setTimeToZero(getNow().withDayOfMonth(1)); + } + + public DateTime getStartOfNextMonth() { + return setTimeToZero(getNow().withDayOfMonth(1).plusMonths(1)); + } + + public DateTime setTimeToZero(DateTime input) { + return input.withHourOfDay(0).withMinuteOfHour(0).withSecondOfMinute(0).withMillisOfDay(0); + } + + public DateTime getTomorrowStart() { + return setTimeToZero(getNow().plusDays(1)); + } + + public DateRange getTrailingWeekDateRange() { + final DateTime todayEnd = getTomorrowStart().minusMillis(1); + final DateTime aWeekAgoStart = todayEnd.minusWeeks(1).plusMillis(1); + return new DateRange( + String.valueOf(aWeekAgoStart.getMillis()), String.valueOf(todayEnd.getMillis())); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java index 7f90071c6770cc..904db311d34d06 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/util/SearchInsightsUtil.java @@ -6,7 +6,6 @@ import java.util.List; import javax.annotation.Nullable; - public class SearchInsightsUtil { public static List getInsightsFromFeatures(@Nullable final DoubleMap features) { @@ -18,5 +17,5 @@ public static List getInsightsFromFeatures(@Nullable final Double return Collections.emptyList(); } - private SearchInsightsUtil() { } + private SearchInsightsUtil() {} } diff --git a/datahub-graphql-core/src/main/resources/app.graphql b/datahub-graphql-core/src/main/resources/app.graphql index 075a3b0fac43bc..52451e195ee841 100644 --- a/datahub-graphql-core/src/main/resources/app.graphql +++ b/datahub-graphql-core/src/main/resources/app.graphql @@ -437,6 +437,11 @@ type FeatureFlagsConfig { """ showBrowseV2: 
Boolean! + """ + Whether browse v2 is platform mode, which means that platforms are displayed instead of entity types at the root. + """ + platformBrowseV2: Boolean! + """ Whether we should show CTAs in the UI related to moving to Managed DataHub by Acryl. """ diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 4f3769d9088156..307c7f7b383e30 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -1789,12 +1789,13 @@ type DatasetProperties { """ Last Modified timestamp millis associated with the Dataset """ - lastModified: Long + lastModified: AuditStamp! """ - Actor associated with the Dataset's lastModified timestamp + Actor associated with the Dataset's lastModified timestamp. + Deprecated - Use lastModified.actor instead. """ - lastModifiedActor: String + lastModifiedActor: String @deprecated } @@ -11054,6 +11055,10 @@ input CreateDataProductInput { The primary key of the Domain """ domainUrn: String! + """ + An optional id for the new data product + """ + id: String } """ @@ -11234,4 +11239,4 @@ input UpdateOwnershipTypeInput { The description of the Custom Ownership Type """ description: String -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index e0cde5a2db9f99..8f2377edb546e0 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -1176,9 +1176,14 @@ Input required for browse queries """ input BrowseV2Input { """ - The browse entity type + The browse entity type - deprecated use types instead """ - type: EntityType! + type: EntityType + + """ + The browse entity type - deprecated use types instead. If not provided, all types will be used. + """ + types: [EntityType!] 
""" The browse path V2 - a list with each entry being part of the browse path V2 diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index 606123cac926de..69cd73ecd7d68d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -14,17 +14,17 @@ import com.linkedin.metadata.models.registry.ConfigEntityRegistry; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.mxe.MetadataChangeProposal; -import org.mockito.Mockito; - import java.util.List; - +import org.mockito.Mockito; public class TestUtils { public static EntityService getMockEntityService() { - PathSpecBasedSchemaAnnotationVisitor.class.getClassLoader() + PathSpecBasedSchemaAnnotationVisitor.class + .getClassLoader() .setClassAssertionStatus(PathSpecBasedSchemaAnnotationVisitor.class.getName(), false); - EntityRegistry registry = new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); + EntityRegistry registry = + new ConfigEntityRegistry(TestUtils.class.getResourceAsStream("/test-entity-registry.yaml")); EntityService mockEntityService = Mockito.mock(EntityService.class); Mockito.when(mockEntityService.getEntityRegistry()).thenReturn(registry); return mockEntityService; @@ -44,9 +44,10 @@ public static QueryContext getMockAllowContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); 
return mockContext; } @@ -60,9 +61,10 @@ public static QueryContext getMockAllowContext(String actorUrn, AuthorizationReq Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -80,9 +82,10 @@ public static QueryContext getMockDenyContext(String actorUrn) { Mockito.when(mockAuthorizer.authorize(Mockito.any())).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } @@ -96,55 +99,54 @@ public static QueryContext getMockDenyContext(String actorUrn, AuthorizationRequ Mockito.when(mockAuthorizer.authorize(Mockito.eq(request))).thenReturn(result); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn( + new Authentication( + new Actor(ActorType.USER, UrnUtils.getUrn(actorUrn).getId()), "creds")); return mockContext; } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + public static void verifyIngestProposal( + EntityService mockService, int numberOfInvocations, 
MetadataChangeProposal proposal) { verifyIngestProposal(mockService, numberOfInvocations, List.of(proposal)); } - public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, List proposals) { - AspectsBatchImpl batch = AspectsBatchImpl.builder() - .mcps(proposals, mockService.getEntityRegistry()) - .build(); - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(batch), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifyIngestProposal( + EntityService mockService, int numberOfInvocations, List proposals) { + AspectsBatchImpl batch = + AspectsBatchImpl.builder().mcps(proposals, mockService.getEntityRegistry()).build(); + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(Mockito.eq(batch), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(AuditStamp.class), Mockito.eq(false)); } public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); } - public static void verifySingleIngestProposal(EntityService 
mockService, int numberOfInvocations) { - Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( + public static void verifySingleIngestProposal( + EntityService mockService, int numberOfInvocations) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)) + .ingestProposal( Mockito.any(MetadataChangeProposal.class), Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.eq(false)); } public static void verifyNoIngestProposal(EntityService mockService) { - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } - private TestUtils() { } + private TestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java index 7cd548a4790bae..57d85e5b204c21 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ResolverUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static org.testng.AssertJUnit.assertEquals; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,16 +18,11 @@ import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import graphql.schema.DataFetchingEnvironment; -import org.testng.annotations.Test; -import org.mockito.Mockito; - import java.util.HashSet; import java.util.Set; import java.util.stream.Collectors; - -import static 
com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; -import static org.testng.AssertJUnit.assertEquals; - +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ResolverUtilsTest { @@ -35,46 +33,48 @@ public void testCriterionFromFilter() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockAllowContext); // this is the expected path - Criterion valuesCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - null, - ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"), - false, - FilterOperator.EQUAL - ) - ); - assertEquals(valuesCriterion, new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def")) - ).setNegated(false).setCondition(Condition.EQUAL).setField("tags.keyword")); + Criterion valuesCriterion = + criterionFromFilter( + new FacetFilterInput( + "tags", + null, + ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"), + false, + FilterOperator.EQUAL)); + assertEquals( + valuesCriterion, + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc", "urn:li:tag:def"))) + .setNegated(false) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); // this is the legacy pathway - Criterion valueCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - "urn:li:tag:abc", - null, - true, - FilterOperator.EQUAL - ) - ); - assertEquals(valueCriterion, new Criterion().setValue("urn:li:tag:abc").setValues( - new StringArray(ImmutableList.of("urn:li:tag:abc")) - ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword")); + Criterion valueCriterion = + criterionFromFilter( + new FacetFilterInput("tags", "urn:li:tag:abc", null, true, FilterOperator.EQUAL)); + assertEquals( + valueCriterion, + new Criterion() + .setValue("urn:li:tag:abc") + .setValues(new StringArray(ImmutableList.of("urn:li:tag:abc"))) + .setNegated(true) + .setCondition(Condition.EQUAL) + 
.setField("tags.keyword")); - // check that both being null doesn't cause a NPE. this should never happen except via API interaction - Criterion doubleNullCriterion = criterionFromFilter( - new FacetFilterInput( - "tags", - null, - null, - true, - FilterOperator.EQUAL - ) - ); - assertEquals(doubleNullCriterion, new Criterion().setValue("").setValues( - new StringArray(ImmutableList.of()) - ).setNegated(true).setCondition(Condition.EQUAL).setField("tags.keyword")); + // check that both being null doesn't cause a NPE. this should never happen except via API + // interaction + Criterion doubleNullCriterion = + criterionFromFilter(new FacetFilterInput("tags", null, null, true, FilterOperator.EQUAL)); + assertEquals( + doubleNullCriterion, + new Criterion() + .setValue("") + .setValues(new StringArray(ImmutableList.of())) + .setNegated(true) + .setCondition(Condition.EQUAL) + .setField("tags.keyword")); } @Test @@ -85,21 +85,25 @@ public void testBuildFilterWithUrns() throws Exception { urns.add(urn1); urns.add(urn2); - Criterion ownersCriterion = new Criterion() - .setField("owners") - .setValues(new StringArray("urn:li:corpuser:chris")) - .setCondition(Condition.EQUAL); + Criterion ownersCriterion = + new Criterion() + .setField("owners") + .setValues(new StringArray("urn:li:corpuser:chris")) + .setCondition(Condition.EQUAL); CriterionArray andCriterionArray = new CriterionArray(ImmutableList.of(ownersCriterion)); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(andCriterionArray) - ))); + filter.setOr( + new ConjunctiveCriterionArray( + ImmutableList.of(new ConjunctiveCriterion().setAnd(andCriterionArray)))); Filter finalFilter = buildFilterWithUrns(urns, filter); - Criterion urnsCriterion = new Criterion().setField("urn") - .setValue("") - .setValues(new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); + Criterion urnsCriterion = + new Criterion() + 
.setField("urn") + .setValue("") + .setValues( + new StringArray(urns.stream().map(Object::toString).collect(Collectors.toList()))); for (ConjunctiveCriterion conjunctiveCriterion : finalFilter.getOr()) { assertEquals(conjunctiveCriterion.getAnd().contains(ownersCriterion), true); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java index c7424174255cec..0d87ce4b2e2adf 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/UpdateLineageResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -9,35 +14,35 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.service.LineageService; import graphql.schema.DataFetchingEnvironment; -import org.joda.time.DateTimeUtils; -import org.mockito.Mockito; -import org.testng.annotations.BeforeMethod; -import org.testng.annotations.Test; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.joda.time.DateTimeUtils; +import 
org.mockito.Mockito; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; public class UpdateLineageResolverTest { private static EntityService _mockService = Mockito.mock(EntityService.class); private static LineageService _lineageService; private static DataFetchingEnvironment _mockEnv; - private static final String DATASET_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; - private static final String DATASET_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; - private static final String DATASET_URN_3 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; - private static final String DATASET_URN_4 = "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; + private static final String DATASET_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test1,DEV)"; + private static final String DATASET_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test2,DEV)"; + private static final String DATASET_URN_3 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test3,DEV)"; + private static final String DATASET_URN_4 = + "urn:li:dataset:(urn:li:dataPlatform:bigquery,test4,DEV)"; private static final String CHART_URN = "urn:li:chart:(looker,baz)"; private static final String DASHBOARD_URN = "urn:li:dashboard:(airflow,id)"; - private static final String DATAJOB_URN_1 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; - private static final String DATAJOB_URN_2 = "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; + private static final String DATAJOB_URN_1 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test1)"; + private static final String DATAJOB_URN_2 = + "urn:li:dataJob:(urn:li:dataFlow:(airflow,test,prod),test2)"; @BeforeMethod public void setupTest() { @@ -50,8 +55,12 @@ public void setupTest() { // Adds upstream for dataset1 to dataset2 and removes edge to dataset3 @Test public void testUpdateDatasetLineage() throws Exception { - List edgesToAdd = 
Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4)); - List edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DATASET_URN_1, DATASET_URN_2), + createLineageEdge(DATASET_URN_3, DATASET_URN_4)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -65,7 +74,8 @@ public void testUpdateDatasetLineage() throws Exception { @Test public void testFailUpdateWithMissingDownstream() throws Exception { - List edgesToAdd = Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2)); + List edgesToAdd = + Collections.singletonList(createLineageEdge(DATASET_URN_1, DATASET_URN_2)); mockInputAndContext(edgesToAdd, new ArrayList<>()); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -93,8 +103,12 @@ public void testUpdateChartLineage() throws Exception { // Adds upstream for dashboard to dataset2 and chart1 and removes edge to dataset1 @Test public void testUpdateDashboardLineage() throws Exception { - List edgesToAdd = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_2), createLineageEdge(DASHBOARD_URN, CHART_URN)); - List edgesToRemove = Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DASHBOARD_URN, DATASET_URN_2), + createLineageEdge(DASHBOARD_URN, CHART_URN)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DASHBOARD_URN, DATASET_URN_1)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -109,12 +123,13 @@ public void testUpdateDashboardLineage() throws Exception { // Adds upstream datajob and dataset and one downstream 
dataset @Test public void testUpdateDataJobLineage() throws Exception { - List edgesToAdd = Arrays.asList( - createLineageEdge(DATAJOB_URN_1, DATASET_URN_2), - createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2), - createLineageEdge(DATASET_URN_3, DATAJOB_URN_1) - ); - List edgesToRemove = Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DATAJOB_URN_1, DATASET_URN_2), + createLineageEdge(DATAJOB_URN_1, DATAJOB_URN_2), + createLineageEdge(DATASET_URN_3, DATAJOB_URN_1)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DATAJOB_URN_1, DATASET_URN_1)); mockInputAndContext(edgesToAdd, edgesToRemove); UpdateLineageResolver resolver = new UpdateLineageResolver(_mockService, _lineageService); @@ -129,8 +144,12 @@ public void testUpdateDataJobLineage() throws Exception { @Test public void testFailUpdateLineageNoPermissions() throws Exception { - List edgesToAdd = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_2), createLineageEdge(DATASET_URN_3, DATASET_URN_4)); - List edgesToRemove = Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); + List edgesToAdd = + Arrays.asList( + createLineageEdge(DATASET_URN_1, DATASET_URN_2), + createLineageEdge(DATASET_URN_3, DATASET_URN_4)); + List edgesToRemove = + Arrays.asList(createLineageEdge(DATASET_URN_1, DATASET_URN_3)); QueryContext mockContext = getMockDenyContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); @@ -147,7 +166,6 @@ public void testFailUpdateLineageNoPermissions() throws Exception { assertThrows(AuthorizationException.class, () -> resolver.get(_mockEnv).join()); } - private void mockInputAndContext(List edgesToAdd, List edgesToRemove) { QueryContext mockContext = getMockAllowContext(); UpdateLineageInput input = new UpdateLineageInput(edgesToAdd, edgesToRemove); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java index 6fdb1f2b70ce4b..f590e71146eb41 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.assertion.AssertionResult; @@ -19,9 +21,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class AssertionRunEventResolverTest { @Test public void testGetSuccess() throws Exception { @@ -29,35 +28,36 @@ public void testGetSuccess() throws Exception { final Urn assertionUrn = Urn.createFromString("urn:li:assertion:guid-1"); final Urn asserteeUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); - final AssertionRunEvent gmsRunEvent = new AssertionRunEvent() - .setTimestampMillis(12L) - .setAssertionUrn(assertionUrn) - .setRunId("test-id") - .setAsserteeUrn(asserteeUrn) - .setStatus(AssertionRunStatus.COMPLETE) - .setResult(new AssertionResult() - .setActualAggValue(10) - .setMissingCount(0L) - .setRowCount(1L) - .setType(AssertionResultType.SUCCESS) - .setUnexpectedCount(2L) - ); + final AssertionRunEvent gmsRunEvent = + new AssertionRunEvent() + .setTimestampMillis(12L) + .setAssertionUrn(assertionUrn) + .setRunId("test-id") + .setAsserteeUrn(asserteeUrn) + .setStatus(AssertionRunStatus.COMPLETE) + .setResult( + new AssertionResult() + .setActualAggValue(10) + .setMissingCount(0L) + .setRowCount(1L) + .setType(AssertionResultType.SUCCESS) + 
.setUnexpectedCount(2L)); - Mockito.when(mockClient.getTimeseriesAspectValues( - Mockito.eq(assertionUrn.toString()), - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.eq(0L), - Mockito.eq(10L), - Mockito.eq(5), - Mockito.eq(AssertionRunEventResolver.buildFilter(null, AssertionRunStatus.COMPLETE.toString())), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableList.of( - new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent)) - ) - ); + Mockito.when( + mockClient.getTimeseriesAspectValues( + Mockito.eq(assertionUrn.toString()), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.eq(0L), + Mockito.eq(10L), + Mockito.eq(5), + Mockito.eq( + AssertionRunEventResolver.buildFilter( + null, AssertionRunStatus.COMPLETE.toString())), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableList.of( + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(gmsRunEvent)))); AssertionRunEventResolver resolver = new AssertionRunEventResolver(mockClient); @@ -66,9 +66,12 @@ public void testGetSuccess() throws Exception { Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))).thenReturn("COMPLETE"); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))).thenReturn(0L); - Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))).thenReturn(10L); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("status"), Mockito.eq(null))) + .thenReturn("COMPLETE"); + Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("startTimeMillis"), Mockito.eq(null))) + .thenReturn(0L); + 
Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("endTimeMillis"), Mockito.eq(null))) + .thenReturn(10L); Mockito.when(mockEnv.getArgumentOrDefault(Mockito.eq("limit"), Mockito.eq(null))).thenReturn(5); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -78,32 +81,37 @@ public void testGetSuccess() throws Exception { AssertionRunEventsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).getTimeseriesAspectValues( - Mockito.eq(assertionUrn.toString()), - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.eq(0L), - Mockito.eq(10L), - Mockito.eq(5), - Mockito.any(Filter.class), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .getTimeseriesAspectValues( + Mockito.eq(assertionUrn.toString()), + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.eq(0L), + Mockito.eq(10L), + Mockito.eq(5), + Mockito.any(Filter.class), + Mockito.any(Authentication.class)); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getTotal(), 1); assertEquals(result.getFailed(), 0); assertEquals(result.getSucceeded(), 1); - com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = resolver.get(mockEnv).get().getRunEvents().get(0); + com.linkedin.datahub.graphql.generated.AssertionRunEvent graphqlRunEvent = + resolver.get(mockEnv).get().getRunEvents().get(0); assertEquals(graphqlRunEvent.getAssertionUrn(), assertionUrn.toString()); assertEquals(graphqlRunEvent.getAsserteeUrn(), asserteeUrn.toString()); assertEquals(graphqlRunEvent.getRunId(), "test-id"); - assertEquals(graphqlRunEvent.getStatus(), com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE); + assertEquals( + graphqlRunEvent.getStatus(), + com.linkedin.datahub.graphql.generated.AssertionRunStatus.COMPLETE); assertEquals((float) 
graphqlRunEvent.getTimestampMillis(), 12L); assertEquals((float) graphqlRunEvent.getResult().getActualAggValue(), 10); assertEquals((long) graphqlRunEvent.getResult().getMissingCount(), 0L); assertEquals((long) graphqlRunEvent.getResult().getRowCount(), 1L); assertEquals((long) graphqlRunEvent.getResult().getUnexpectedCount(), 2L); - assertEquals(graphqlRunEvent.getResult().getType(), com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS); + assertEquals( + graphqlRunEvent.getResult().getType(), + com.linkedin.datahub.graphql.generated.AssertionResultType.SUCCESS); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java index 8afec0a8895776..019d254ffdaac7 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/DeleteAssertionResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.assertion.AssertionInfo; @@ -18,10 +21,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteAssertionResolverTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; @@ -33,20 +32,17 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - 
Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion( - new DatasetAssertionInfo() - .setDataset(Urn.createFromString(TEST_DATASET_URN)) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setOperator(AssertionStdOperator.BETWEEN) - ) - ); + Mockito.when( + mockService.getAspect( + Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) + .thenReturn( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(Urn.createFromString(TEST_DATASET_URN)) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setOperator(AssertionStdOperator.BETWEEN))); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); @@ -58,20 +54,19 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.any(Authentication.class)); - Mockito.verify(mockService, Mockito.times(1)).getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getAspect( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), + Mockito.eq(0L)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); } @Test @@ -80,11 +75,10 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { 
EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn(null); + Mockito.when( + mockService.getAspect( + Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) + .thenReturn(null); DeleteAssertionResolver resolver = new DeleteAssertionResolver(mockClient, mockService); @@ -96,20 +90,19 @@ public void testGetSuccessNoAssertionInfoFound() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.any(Authentication.class)); - Mockito.verify(mockService, Mockito.times(1)).getAspect( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), - Mockito.eq(0L) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getAspect( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.eq(Constants.ASSERTION_INFO_ASPECT_NAME), + Mockito.eq(0L)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); } @Test @@ -130,21 +123,20 @@ public void testGetSuccessAssertionAlreadyRemoved() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), - Mockito.any(Authentication.class) - ); - - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - 
Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), - Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN)), + Mockito.any(Authentication.class)); + + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(Urn.createFromString(TEST_ASSERTION_URN))), + Mockito.eq(ImmutableSet.of(Constants.ASSERTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ASSERTION_URN))); } @Test @@ -153,20 +145,17 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); - Mockito.when(mockService.getAspect( - Urn.createFromString(TEST_ASSERTION_URN), - Constants.ASSERTION_INFO_ASPECT_NAME, - 0L - )).thenReturn( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion( - new DatasetAssertionInfo() - .setDataset(Urn.createFromString(TEST_DATASET_URN)) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setOperator(AssertionStdOperator.BETWEEN) - ) - ); + Mockito.when( + mockService.getAspect( + Urn.createFromString(TEST_ASSERTION_URN), Constants.ASSERTION_INFO_ASPECT_NAME, 0L)) + .thenReturn( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(Urn.createFromString(TEST_DATASET_URN)) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setOperator(AssertionStdOperator.BETWEEN))); DeleteAssertionResolver resolver 
= new DeleteAssertionResolver(mockClient, mockService); @@ -177,17 +166,16 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ASSERTION_URN))).thenReturn(true); @@ -202,4 +190,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java index c5b5725f23b7ae..19152a7a11877a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/assertion/EntityAssertionsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; 
import com.google.common.collect.ImmutableMap; @@ -37,9 +39,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityAssertionsResolverTest { @Test public void testGetSuccess() throws Exception { @@ -49,73 +48,76 @@ public void testGetSuccess() throws Exception { Urn datasetUrn = Urn.createFromString("urn:li:dataset:(test,test,test)"); Urn assertionUrn = Urn.createFromString("urn:li:assertion:test-guid"); - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(datasetUrn.toString()), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(assertionUrn) - .setType("Asserts")) - )) - ); - + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(datasetUrn.toString()), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship().setEntity(assertionUrn).setType("Asserts"))))); Map assertionAspects = new HashMap<>(); assertionAspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionKey().setAssertionId("test-guid").data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue(new Aspect(new AssertionKey().setAssertionId("test-guid").data()))); assertionAspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(new DatasetAssertionInfo() 
- .setDataset(datasetUrn) - .setScope(DatasetAssertionScope.DATASET_COLUMN) - .setAggregation(AssertionStdAggregation.MAX) - .setOperator(AssertionStdOperator.EQUAL_TO) - .setFields(new UrnArray(ImmutableList.of( - Urn.createFromString("urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)") - ))) - .setParameters(new AssertionStdParameters().setValue(new AssertionStdParameter() - .setValue("10") - .setType( - AssertionStdParameterType.NUMBER))) - ).data() - )) - ); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion( + new DatasetAssertionInfo() + .setDataset(datasetUrn) + .setScope(DatasetAssertionScope.DATASET_COLUMN) + .setAggregation(AssertionStdAggregation.MAX) + .setOperator(AssertionStdOperator.EQUAL_TO) + .setFields( + new UrnArray( + ImmutableList.of( + Urn.createFromString( + "urn:li:schemaField:(urn:li:dataset:(test,test,test),fieldPath)")))) + .setParameters( + new AssertionStdParameters() + .setValue( + new AssertionStdParameter() + .setValue("10") + .setType(AssertionStdParameterType.NUMBER)))) + .data()))); assertionAspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect( - new DataPlatformInstance() - .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) - .data() - )) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn)), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(ImmutableMap.of( - assertionUrn, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn) - .setAspects(new EnvelopedAspectMap(assertionAspects)))); + new com.linkedin.entity.EnvelopedAspect() + .setValue( + new Aspect( + new DataPlatformInstance() + .setPlatform(Urn.createFromString("urn:li:dataPlatform:hive")) + .data()))); + + Mockito.when( + 
mockClient.batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn)), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + assertionUrn, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn) + .setAspects(new EnvelopedAspectMap(assertionAspects)))); EntityAssertionsResolver resolver = new EntityAssertionsResolver(mockClient, graphClient); @@ -134,38 +136,45 @@ public void testGetSuccess() throws Exception { EntityAssertionsResult result = resolver.get(mockEnv).get(); - Mockito.verify(graphClient, Mockito.times(1)).getRelatedEntities( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); - - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(graphClient, Mockito.times(1)) + .getRelatedEntities( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any()); + + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); // Assert that GraphQL assertion run event matches expectations assertEquals(result.getStart(), 0); assertEquals(result.getCount(), 1); assertEquals(result.getTotal(), 1); - com.linkedin.datahub.graphql.generated.Assertion assertion = resolver.get(mockEnv).get().getAssertions().get(0); + com.linkedin.datahub.graphql.generated.Assertion assertion = + resolver.get(mockEnv).get().getAssertions().get(0); assertEquals(assertion.getUrn(), assertionUrn.toString()); assertEquals(assertion.getType(), EntityType.ASSERTION); assertEquals(assertion.getPlatform().getUrn(), "urn:li:dataPlatform:hive"); - assertEquals(assertion.getInfo().getType(), com.linkedin.datahub.graphql.generated.AssertionType.DATASET); + assertEquals( + assertion.getInfo().getType(), + 
com.linkedin.datahub.graphql.generated.AssertionType.DATASET); assertEquals(assertion.getInfo().getDatasetAssertion().getDatasetUrn(), datasetUrn.toString()); - assertEquals(assertion.getInfo().getDatasetAssertion().getScope(), com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); - assertEquals(assertion.getInfo().getDatasetAssertion().getAggregation(), com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); - assertEquals(assertion.getInfo().getDatasetAssertion().getOperator(), com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), + assertEquals( + assertion.getInfo().getDatasetAssertion().getScope(), + com.linkedin.datahub.graphql.generated.DatasetAssertionScope.DATASET_COLUMN); + assertEquals( + assertion.getInfo().getDatasetAssertion().getAggregation(), + com.linkedin.datahub.graphql.generated.AssertionStdAggregation.MAX); + assertEquals( + assertion.getInfo().getDatasetAssertion().getOperator(), + com.linkedin.datahub.graphql.generated.AssertionStdOperator.EQUAL_TO); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getType(), com.linkedin.datahub.graphql.generated.AssertionStdParameterType.NUMBER); - assertEquals(assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); + assertEquals( + assertion.getInfo().getDatasetAssertion().getParameters().getValue().getValue(), "10"); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 52d06f73dcfab9..419eb71d5e143d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; @@ -18,9 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; - - public class ListAccessTokensResolverTest { @Test @@ -42,16 +41,22 @@ public void testGetSuccess() throws Exception { final EntityClient mockClient = Mockito.mock(EntityClient.class); final Authentication testAuth = getAuthentication(mockEnv); - Mockito.when(mockClient.search( - Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(buildFilter(filters, Collections.emptyList())), - Mockito.any(SortCriterion.class), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.eq(testAuth), - Mockito.any(SearchFlags.class))) - .thenReturn(new SearchResult().setFrom(0).setNumEntities(0).setPageSize(0).setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(buildFilter(filters, Collections.emptyList())), + Mockito.any(SortCriterion.class), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.eq(testAuth), + Mockito.any(SearchFlags.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setNumEntities(0) + .setPageSize(0) + .setEntities(new SearchEntityArray())); final ListAccessTokensResolver resolver = new ListAccessTokensResolver(mockClient); final ListAccessTokenResult listAccessTokenResult = resolver.get(mockEnv).get(); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java index 4a948537ab4fed..433772d7e2cfe1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/BrowseV2ResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -29,15 +31,12 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; +import java.util.ArrayList; +import java.util.List; import org.mockito.Mockito; import org.testng.Assert; import org.testng.annotations.Test; -import java.util.ArrayList; -import java.util.List; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class BrowseV2ResolverTest { private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); @@ -46,23 +45,30 @@ public class BrowseV2ResolverTest { @Test public static void testBrowseV2Success() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - null, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + null, + 0, + 10, + new 
BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); @@ -92,23 +98,30 @@ public static void testBrowseV2SuccessWithQueryAndFilter() throws Exception { orFilters.add(andFilterInput); Filter filter = ResolverUtils.buildFilter(null, orFilters); - EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "test", - filter, - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "test", + filter, + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, mockService); @@ -132,23 +145,30 @@ public static void testBrowseV2SuccessWithView() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); 
- EntityClient mockClient = initMockEntityClient( - "dataset", - "␟test␟path", - "*", - viewInfo.getDefinition().getFilter(), - 0, - 10, - new BrowseResultV2() - .setNumGroups(2) - .setGroups(new BrowseResultGroupV2Array( - new BrowseResultGroupV2().setCount(5).setName("first group").setHasSubGroups(true), - new BrowseResultGroupV2().setCount(4).setName("second group").setHasSubGroups(false) - )) - .setMetadata(new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) - .setFrom(0) - .setPageSize(10) - ); + EntityClient mockClient = + initMockEntityClient( + "dataset", + "␟test␟path", + "*", + viewInfo.getDefinition().getFilter(), + 0, + 10, + new BrowseResultV2() + .setNumGroups(2) + .setGroups( + new BrowseResultGroupV2Array( + new BrowseResultGroupV2() + .setCount(5) + .setName("first group") + .setHasSubGroups(true), + new BrowseResultGroupV2() + .setCount(4) + .setName("second group") + .setHasSubGroups(false))) + .setMetadata( + new BrowseResultMetadata().setPath("␟test␟path").setTotalNumEntities(100)) + .setFrom(0) + .setPageSize(10)); final BrowseV2Resolver resolver = new BrowseV2Resolver(mockClient, viewService); @@ -166,16 +186,25 @@ public static void testBrowseV2SuccessWithView() throws Exception { compareResultToExpectedData(result, getExpectedResult()); } - private static void compareResultToExpectedData(BrowseResultsV2 result, BrowseResultsV2 expected) { + private static void compareResultToExpectedData( + BrowseResultsV2 result, BrowseResultsV2 expected) { Assert.assertEquals(result.getCount(), expected.getCount()); Assert.assertEquals(result.getStart(), expected.getStart()); Assert.assertEquals(result.getTotal(), expected.getTotal()); Assert.assertEquals(result.getGroups().size(), expected.getGroups().size()); - result.getGroups().forEach(group -> { - Assert.assertTrue(expected.getGroups().stream().filter(g -> g.getName().equals(group.getName())).count() > 0); - }); + result + .getGroups() + .forEach( + group -> { + 
Assert.assertTrue( + expected.getGroups().stream() + .filter(g -> g.getName().equals(group.getName())) + .count() + > 0); + }); Assert.assertEquals(result.getMetadata().getPath(), expected.getMetadata().getPath()); - Assert.assertEquals(result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); + Assert.assertEquals( + result.getMetadata().getTotalNumEntities(), expected.getMetadata().getTotalNumEntities()); } private static BrowseResultsV2 getExpectedResult() { @@ -185,19 +214,22 @@ private static BrowseResultsV2 getExpectedResult() { results.setCount(10); List groups = new ArrayList<>(); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup1 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup1.setName("first group"); browseGroup1.setCount(5L); browseGroup1.setHasSubGroups(true); groups.add(browseGroup1); - com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); + com.linkedin.datahub.graphql.generated.BrowseResultGroupV2 browseGroup2 = + new com.linkedin.datahub.graphql.generated.BrowseResultGroupV2(); browseGroup2.setName("second group"); browseGroup2.setCount(4L); browseGroup2.setHasSubGroups(false); groups.add(browseGroup2); results.setGroups(groups); - com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); + com.linkedin.datahub.graphql.generated.BrowseResultMetadata resultMetadata = + new com.linkedin.datahub.graphql.generated.BrowseResultMetadata(); resultMetadata.setPath(ImmutableList.of("test", "path")); resultMetadata.setTotalNumEntities(100L); results.setMetadata(resultMetadata); @@ -212,60 +244,52 @@ private static EntityClient initMockEntityClient( Filter filter, 
int start, int limit, - BrowseResultV2 result - ) throws Exception { + BrowseResultV2 result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.browseV2( - Mockito.eq(entityName), - Mockito.eq(path), - Mockito.eq(filter), - Mockito.eq(query), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.browseV2( + Mockito.eq(ImmutableList.of(entityName)), + Mockito.eq(path), + Mockito.eq(filter), + Mockito.eq(query), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } + private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); 
info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } - private BrowseV2ResolverTest() { } - + private BrowseV2ResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java index 659e6aea740ec1..75abf1d48a15c8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/browse/EntityBrowsePathsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.browse; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -16,12 +18,10 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class EntityBrowsePathsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { @@ -30,9 +30,7 @@ public void testGetSuccess() throws Exception { List path = ImmutableList.of("prod", "mysql"); Mockito.when(mockType.browsePaths(Mockito.eq(TEST_ENTITY_URN), Mockito.any())) - .thenReturn(ImmutableList.of( - new BrowsePath(path)) - ); + .thenReturn(ImmutableList.of(new BrowsePath(path))); // Execute resolver QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -55,9 
+53,9 @@ public void testGetSuccess() throws Exception { @Test public void testGetBrowsePathsException() throws Exception { BrowsableEntityType mockType = Mockito.mock(BrowsableEntityType.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockType).browsePaths( - Mockito.any(), - Mockito.any()); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockType) + .browsePaths(Mockito.any(), Mockito.any()); EntityBrowsePathsResolver resolver = new EntityBrowsePathsResolver(mockType); @@ -75,4 +73,4 @@ public void testGetBrowsePathsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java index 39a08ca26167d1..1203f4e22bdc23 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ContainerEntitiesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -24,17 +26,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class ContainerEntitiesResolverTest { - private static final ContainerEntitiesInput TEST_INPUT = new ContainerEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final ContainerEntitiesInput TEST_INPUT = + new ContainerEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -44,35 
+39,39 @@ public void testGetSuccess() throws Exception { final String childUrn = "urn:li:dataset:(test,test,test)"; final String containerUrn = "urn:li:container:test-container"; - final Criterion filterCriterion = new Criterion() - .setField("container.keyword") - .setCondition(Condition.EQUAL) - .setValue(containerUrn); + final Criterion filterCriterion = + new Criterion() + .setField("container.keyword") + .setCondition(Condition.EQUAL) + .setValue(containerUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.eq(ContainerEntitiesResolver.CONTAINABLE_ENTITY_NAMES), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); ContainerEntitiesResolver resolver = new 
ContainerEntitiesResolver(mockClient); @@ -92,6 +91,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java index 92f8dfc4e1d67c..b4c58ca182b2f3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/container/ParentContainersResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.container; +import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; +import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.container.Container; @@ -14,18 +19,11 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.CONTAINER_ASPECT_NAME; -import static com.linkedin.metadata.Constants.CONTAINER_ENTITY_NAME; 
-import static com.linkedin.metadata.Constants.CONTAINER_PROPERTIES_ASPECT_NAME; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentContainersResolverTest { @Test @@ -42,77 +40,88 @@ public void testGetSuccess() throws Exception { datasetEntity.setType(EntityType.DATASET); Mockito.when(mockEnv.getSource()).thenReturn(datasetEntity); - final Container parentContainer1 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); - final Container parentContainer2 = new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); + final Container parentContainer1 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container")); + final Container parentContainer2 = + new Container().setContainer(Urn.createFromString("urn:li:container:test-container2")); Map datasetAspects = new HashMap<>(); - datasetAspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); + datasetAspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer1.data()))); Map parentContainer1Aspects = new HashMap<>(); - parentContainer1Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new ContainerProperties().setName("test_schema").data() - ))); - parentContainer1Aspects.put(CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - parentContainer2.data() - ))); + parentContainer1Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_schema").data()))); + parentContainer1Aspects.put( + CONTAINER_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentContainer2.data()))); Map parentContainer2Aspects = new HashMap<>(); - parentContainer2Aspects.put(CONTAINER_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new 
ContainerProperties().setName("test_database").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(datasetUrn.getEntityType()), - Mockito.eq(datasetUrn), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer1.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer1.getContainer().getEntityType()), - Mockito.eq(parentContainer1.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(CONTAINER_ENTITY_NAME) - .setUrn(parentContainer2.getContainer()) - .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentContainer2.getContainer().getEntityType()), - Mockito.eq(parentContainer2.getContainer()), - Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + parentContainer2Aspects.put( + CONTAINER_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new ContainerProperties().setName("test_database").data()))); + + Mockito.when( + 
mockClient.getV2( + Mockito.eq(datasetUrn.getEntityType()), + Mockito.eq(datasetUrn), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(datasetAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer1.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer1.getContainer().getEntityType()), + Mockito.eq(parentContainer1.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(CONTAINER_ENTITY_NAME) + .setUrn(parentContainer2.getContainer()) + .setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentContainer2.getContainer().getEntityType()), + Mockito.eq(parentContainer2.getContainer()), + Mockito.eq(Collections.singleton(CONTAINER_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse().setAspects(new EnvelopedAspectMap(parentContainer2Aspects))); ParentContainersResolver resolver = new ParentContainersResolver(mockClient); ParentContainersResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - 
Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); - assertEquals(result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); - assertEquals(result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); + assertEquals( + result.getContainers().get(0).getUrn(), parentContainer1.getContainer().toString()); + assertEquals( + result.getContainers().get(1).getUrn(), parentContainer2.getContainer().toString()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java index 6a9617ea41b448..2abfa39b35149a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dashboard/DashboardStatsSummaryTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dashboard; +import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.UrnUtils; @@ -28,9 +30,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.dashboard.DashboardUsageStatsUtils.*; - - public class DashboardStatsSummaryTest { private static final Dashboard TEST_SOURCE = new Dashboard(); @@ -65,31 +64,35 @@ public void testGetSuccess() throws Exception { Assert.assertEquals((int) result.getUniqueUserCountLast30Days(), 2); // Validate the cache. -- First return a new result. 
- DashboardUsageStatistics newUsageStats = new DashboardUsageStatistics() - .setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(40); - EnvelopedAspect newResult = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); + DashboardUsageStatistics newUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(40); + EnvelopedAspect newResult = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(newUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn(ImmutableList.of(newResult)); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(newResult)); // Then verify that the new result is _not_ returned (cache hit) DashboardStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getViewCount(), 20); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + 
Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 2); } @@ -97,28 +100,27 @@ public void testGetSuccess() throws Exception { public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DASHBOARD_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DASHBOARD_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -140,48 +142,46 @@ private TimeseriesAspectService initTestAspectService() { TimeseriesAspectService mockClient = Mockito.mock(TimeseriesAspectService.class); // Mock fetching the latest absolute (snapshot) statistics - DashboardUsageStatistics latestUsageStats = new DashboardUsageStatistics() - 
.setTimestampMillis(0L) - .setLastViewedAt(0L) - .setExecutionsCount(10) - .setFavoritesCount(5) - .setViewsCount(20); - EnvelopedAspect envelopedLatestStats = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); + DashboardUsageStatistics latestUsageStats = + new DashboardUsageStatistics() + .setTimestampMillis(0L) + .setLastViewedAt(0L) + .setExecutionsCount(10) + .setFavoritesCount(5) + .setViewsCount(20); + EnvelopedAspect envelopedLatestStats = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(latestUsageStats)); Filter filterForLatestStats = createUsageFilter(TEST_DASHBOARD_URN, null, null, false); - Mockito.when(mockClient.getAspectValues( - Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.eq(null), - Mockito.eq(null), - Mockito.eq(1), - Mockito.eq(filterForLatestStats) - )).thenReturn( - ImmutableList.of(envelopedLatestStats) - ); - - Mockito.when(mockClient.getAggregatedStats( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), - Mockito.any(), - Mockito.any(Filter.class), - Mockito.any() - )).thenReturn( - new GenericTable().setRows(new StringArrayArray( - new StringArray(ImmutableList.of( - TEST_USER_URN_1, "10", "20", "30", "1", "1", "1" - )), - new StringArray(ImmutableList.of( - TEST_USER_URN_2, "20", "30", "40", "1", "1", "1" - )) - )) - .setColumnNames(new StringArray()) - .setColumnTypes(new StringArray()) - ); + Mockito.when( + mockClient.getAspectValues( + Mockito.eq(UrnUtils.getUrn(TEST_DASHBOARD_URN)), + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.eq(null), + Mockito.eq(null), + Mockito.eq(1), + Mockito.eq(filterForLatestStats))) + .thenReturn(ImmutableList.of(envelopedLatestStats)); + + Mockito.when( + mockClient.getAggregatedStats( + 
Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(Constants.DASHBOARD_USAGE_STATISTICS_ASPECT_NAME), + Mockito.any(), + Mockito.any(Filter.class), + Mockito.any())) + .thenReturn( + new GenericTable() + .setRows( + new StringArrayArray( + new StringArray( + ImmutableList.of(TEST_USER_URN_1, "10", "20", "30", "1", "1", "1")), + new StringArray( + ImmutableList.of(TEST_USER_URN_2, "20", "30", "40", "1", "1", "1")))) + .setColumnNames(new StringArray()) + .setColumnTypes(new StringArray())); return mockClient; } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java index ea9ab2a1b768b7..3ff0120448e545 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetHealthResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.dataset; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.EntityRelationship; @@ -23,60 +25,54 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class DatasetHealthResolverTest { private static final String TEST_DATASET_URN = "urn:li:dataset:(test,test,test)"; private static final String TEST_ASSERTION_URN = "urn:li:assertion:test-guid"; private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:test-guid-2"; - @Test public void testGetSuccessHealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - Mockito.when(graphClient.getRelatedEntities( - 
Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(1) - .setTotal(1) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts")) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(1) + .setTotal(1) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray( + 
ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -103,20 +99,20 @@ public void testGetSuccessNullHealth() throws Exception { TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); // 0 associated assertions, meaning we don't report any health. - Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(0) - .setRelationships(new EntityRelationshipArray(Collections.emptyList())) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(0) + .setRelationships(new EntityRelationshipArray(Collections.emptyList()))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); @@ -134,13 +130,9 @@ public void testGetSuccessNullHealth() throws Exception { List result = resolver.get(mockEnv).get(); assertEquals(result.size(), 0); - Mockito.verify(mockAspectService, Mockito.times(0)).getAggregatedStats( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockAspectService, Mockito.times(0)) + .getAggregatedStats( + Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); } @Test @@ -148,52 +140,47 @@ public void testGetSuccessUnhealthy() throws Exception { GraphClient graphClient = Mockito.mock(GraphClient.class); TimeseriesAspectService mockAspectService = Mockito.mock(TimeseriesAspectService.class); - 
Mockito.when(graphClient.getRelatedEntities( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(ImmutableList.of("Asserts")), - Mockito.eq(RelationshipDirection.INCOMING), - Mockito.eq(0), - Mockito.eq(500), - Mockito.any()) - ).thenReturn( - new EntityRelationships() - .setStart(0) - .setCount(0) - .setTotal(2) - .setRelationships(new EntityRelationshipArray( - ImmutableList.of(new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) - .setType("Asserts"), - new EntityRelationship() - .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) - .setType("Asserts") - ) - )) - ); - - Mockito.when(mockAspectService.getAggregatedStats( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any()) - ).thenReturn( - new GenericTable() - .setColumnNames(new StringArray(ImmutableList.of( - "assertionUrn", "type", "timestampMillis" - ))) - .setColumnTypes(new StringArray("string", "string", "long")) - .setRows(new StringArrayArray( - ImmutableList.of( - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN, "SUCCESS", "0" - )), - new StringArray(ImmutableList.of( - TEST_ASSERTION_URN_2, "FAILURE", "0" - )) - ) - )) - ); + Mockito.when( + graphClient.getRelatedEntities( + Mockito.eq(TEST_DATASET_URN), + Mockito.eq(ImmutableList.of("Asserts")), + Mockito.eq(RelationshipDirection.INCOMING), + Mockito.eq(0), + Mockito.eq(500), + Mockito.any())) + .thenReturn( + new EntityRelationships() + .setStart(0) + .setCount(0) + .setTotal(2) + .setRelationships( + new EntityRelationshipArray( + ImmutableList.of( + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN)) + .setType("Asserts"), + new EntityRelationship() + .setEntity(Urn.createFromString(TEST_ASSERTION_URN_2)) + .setType("Asserts"))))); + + Mockito.when( + mockAspectService.getAggregatedStats( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(Constants.ASSERTION_RUN_EVENT_ASPECT_NAME), 
+ Mockito.any(), + Mockito.any(), + Mockito.any())) + .thenReturn( + new GenericTable() + .setColumnNames( + new StringArray(ImmutableList.of("assertionUrn", "type", "timestampMillis"))) + .setColumnTypes(new StringArray("string", "string", "long")) + .setRows( + new StringArrayArray( + ImmutableList.of( + new StringArray(ImmutableList.of(TEST_ASSERTION_URN, "SUCCESS", "0")), + new StringArray( + ImmutableList.of(TEST_ASSERTION_URN_2, "FAILURE", "0")))))); DatasetHealthResolver resolver = new DatasetHealthResolver(graphClient, mockAspectService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java index 013e23b779c517..52516295f97ade 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/dataset/DatasetStatsSummaryResolverTest.java @@ -19,11 +19,11 @@ import org.testng.Assert; import org.testng.annotations.Test; - public class DatasetStatsSummaryResolverTest { private static final Dataset TEST_SOURCE = new Dataset(); - private static final String TEST_DATASET_URN = "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; + private static final String TEST_DATASET_URN = + "urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"; private static final String TEST_USER_URN_1 = "urn:li:corpuser:test1"; private static final String TEST_USER_URN_2 = "urn:li:corpuser:test2"; @@ -35,28 +35,27 @@ public class DatasetStatsSummaryResolverTest { public void testGetSuccess() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - 
ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(testResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(testResult); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); @@ -84,17 +83,19 @@ public void testGetSuccess() throws Exception { // Validate the cache. -- First return a new result. 
UsageQueryResult newResult = new UsageQueryResult(); newResult.setAggregations(new UsageQueryResultAggregations()); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenReturn(newResult); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenReturn(newResult); // Then verify that the new result is _not_ returned (cache hit) DatasetStatsSummary cachedResult = resolver.get(mockEnv).get(); Assert.assertEquals((int) cachedResult.getQueryCountLast30Days(), 10); Assert.assertEquals((int) cachedResult.getTopUsersLast30Days().size(), 2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); - Assert.assertEquals((String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(0).getUrn(), TEST_USER_URN_2); + Assert.assertEquals( + (String) cachedResult.getTopUsersLast30Days().get(1).getUrn(), TEST_USER_URN_1); Assert.assertEquals((int) cachedResult.getUniqueUserCountLast30Days(), 5); } @@ -102,28 +103,27 @@ public void testGetSuccess() throws Exception { public void testGetException() throws Exception { // Init test UsageQueryResult UsageQueryResult testResult = new UsageQueryResult(); - testResult.setAggregations(new UsageQueryResultAggregations() - .setUniqueUserCount(5) - .setTotalSqlQueries(10) - .setUsers(new UserUsageCountsArray( - ImmutableList.of( - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_1)) - .setUserEmail("test1@gmail.com") - .setCount(20), - new UserUsageCounts() - .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) - .setUserEmail("test2@gmail.com") - .setCount(30) - ) - )) - ); + testResult.setAggregations( + new UsageQueryResultAggregations() + .setUniqueUserCount(5) + .setTotalSqlQueries(10) + .setUsers( + new UserUsageCountsArray( + ImmutableList.of( + new UserUsageCounts() + 
.setUser(UrnUtils.getUrn(TEST_USER_URN_1)) + .setUserEmail("test1@gmail.com") + .setCount(20), + new UserUsageCounts() + .setUser(UrnUtils.getUrn(TEST_USER_URN_2)) + .setUserEmail("test2@gmail.com") + .setCount(30))))); UsageClient mockClient = Mockito.mock(UsageClient.class); - Mockito.when(mockClient.getUsageStats( - Mockito.eq(TEST_DATASET_URN), - Mockito.eq(UsageTimeRange.MONTH) - )).thenThrow(RuntimeException.class); + Mockito.when( + mockClient.getUsageStats( + Mockito.eq(TEST_DATASET_URN), Mockito.eq(UsageTimeRange.MONTH))) + .thenThrow(RuntimeException.class); // Execute resolver DatasetStatsSummaryResolver resolver = new DatasetStatsSummaryResolver(mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index bae6f27a854bc7..49ccc751d35f63 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.delete; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Status; @@ -14,39 +18,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static 
com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateSoftDeletedResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -55,17 +56,21 @@ public void testGetSuccessNoExistingStatus() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + 
ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), true); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(true); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -76,16 +81,18 @@ public void testGetSuccessExistingStatus() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalStatus); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -96,17 +103,21 @@ public void testGetSuccessExistingStatus() 
throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Status newStatus = new Status().setRemoved(false); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - STATUS_ASPECT_NAME, newStatus); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), STATUS_ASPECT_NAME, newStatus); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), STATUS_ASPECT_NAME, newStatus); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -115,15 +126,17 @@ public void testGetSuccessExistingStatus() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.STATUS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.STATUS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -134,7 +147,9 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,7 +166,9 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); @@ -165,20 +182,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + 
.when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateSoftDeletedInput input = new BatchUpdateSoftDeletedInput(ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); + BatchUpdateSoftDeletedInput input = + new BatchUpdateSoftDeletedInput( + ImmutableList.of(TEST_ENTITY_URN_1, TEST_ENTITY_URN_2), false); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index ce5a02bb573e18..8c3620fa978a98 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; @@ -15,39 +19,36 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import 
graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchUpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; @Test public void testGetSuccessNoExistingDeprecation() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -56,46 +57,57 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 0L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 0L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setNote("test") - .setDecommissionTime(0L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(0L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - final Deprecation originalDeprecation = new Deprecation() - .setDeprecated(false) - .setNote("") - 
.setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation originalDeprecation = + new Deprecation() + .setDeprecated(false) + .setNote("") + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDeprecation); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -106,23 +118,31 @@ public void testGetSuccessExistingDeprecation() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - 
.setNote("test") - .setDecommissionTime(1L) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setNote("test") + .setDecommissionTime(1L) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DEPRECATION_ASPECT_NAME, newDeprecation); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DEPRECATION_ASPECT_NAME, newDeprecation); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DEPRECATION_ASPECT_NAME, newDeprecation); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -131,15 +151,17 @@ public void testGetSuccessExistingDeprecation() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DEPRECATION_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -150,9 +172,14 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -168,9 +195,14 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -183,21 +215,29 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - 
Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchUpdateDeprecationInput input = new BatchUpdateDeprecationInput(true, 1L, "test", ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchUpdateDeprecationInput input = + new BatchUpdateDeprecationInput( + true, + 1L, + "test", + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java index 5d30ae08d6dea2..e4be330f5ba2ac 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/UpdateDeprecationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.deprecation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import 
com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -26,20 +30,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateDeprecationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = new UpdateDeprecationInput( - TEST_ENTITY_URN, - true, - 0L, - "Test note" - ); + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final UpdateDeprecationInput TEST_DEPRECATION_INPUT = + new UpdateDeprecationInput(TEST_ENTITY_URN, true, 0L, "Test note"); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); @Test @@ -47,16 +43,19 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + 
ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -71,41 +70,53 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation().setDeprecated(true).setDecommissionTime(0L).setNote("Test note").setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDeprecation() throws Exception { - Deprecation originalDeprecation = new Deprecation().setDeprecated(false).setDecommissionTime(1L).setActor(TEST_ACTOR_URN).setNote(""); + Deprecation originalDeprecation = + new Deprecation() 
+ .setDeprecated(false) + .setDecommissionTime(1L) + .setActor(TEST_ACTOR_URN) + .setNote(""); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDeprecation.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDeprecation.data()))))))); EntityService mockService = Mockito.mock(EntityService.class); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -120,24 +131,21 @@ public void testGetSuccessExistingDeprecation() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Deprecation newDeprecation = new Deprecation() - .setDeprecated(true) - .setDecommissionTime(0L) - .setNote("Test note") - .setActor(TEST_ACTOR_URN); - final MetadataChangeProposal proposal = 
MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DEPRECATION_ASPECT_NAME, newDeprecation); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - + final Deprecation newDeprecation = + new Deprecation() + .setDeprecated(true) + .setDecommissionTime(0L) + .setNote("Test note") + .setActor(TEST_ACTOR_URN); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DEPRECATION_ASPECT_NAME, newDeprecation); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -145,16 +153,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DEPRECATION_ASPECT_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DEPRECATION_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + 
Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DEPRECATION_ASPECT_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = Mockito.mock(EntityService.class); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -169,9 +180,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -188,18 +198,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateDeprecationResolver resolver = new UpdateDeprecationResolver(mockClient, mockService); // Execute resolver @@ -210,4 +219,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, 
() -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index 8cd3c71a21555b..d5ba88066e8461 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.UrnArray; @@ -18,21 +22,17 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchSetDomainResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_DOMAIN_1_URN = "urn:li:domain:test-id-1"; private static final String TEST_DOMAIN_2_URN = 
"urn:li:domain:test-id-2"; @@ -40,19 +40,20 @@ public class BatchSetDomainResolverTest { public void testGetSuccessNoExistingDomains() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -64,46 +65,53 @@ public void testGetSuccessNoExistingDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); 
+ final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + 
Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -117,51 +125,58 @@ public void testGetSuccessExistingDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(TEST_DOMAIN_2_URN, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + TEST_DOMAIN_2_URN, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_2_URN) - ))); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_2_URN)))); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); proposal1.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_1)); proposal1.setEntityType(Constants.DATASET_ENTITY_NAME); proposal1.setAspectName(Constants.DOMAINS_ASPECT_NAME); proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - 
final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN))); } @Test public void testGetSuccessUnsetDomains() throws Exception { - final Domains originalDomain = new Domains().setDomains(new UrnArray(ImmutableList.of( - Urn.createFromString(TEST_DOMAIN_1_URN)))); + final Domains originalDomain = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_DOMAIN_1_URN)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalDomain); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -175,19 +190,24 @@ public void testGetSuccessUnsetDomains() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - DOMAINS_ASPECT_NAME, newDomains); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), DOMAINS_ASPECT_NAME, newDomains); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -196,10 +216,11 @@ public void testGetSuccessUnsetDomains() throws Exception { public void testGetFailureDomainDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -210,9 +231,12 @@ public void testGetFailureDomainDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -224,15 +248,17 @@ public void testGetFailureDomainDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.DOMAINS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.DOMAINS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -244,9 +270,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -262,9 +291,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -277,21 +309,27 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchSetDomainInput input = new BatchSetDomainInput(null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchSetDomainInput input = + new BatchSetDomainInput( + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java index 1ea84b99cfec3b..8f86e33158ad59 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainProposalMatcher.java @@ -6,13 +6,12 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.mockito.ArgumentMatcher; - public class CreateDomainProposalMatcher implements ArgumentMatcher { private MetadataChangeProposal left; public CreateDomainProposalMatcher(MetadataChangeProposal left) { - this.left = left; + this.left = left; } @Override @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } private boolean domainPropertiesMatch(GenericAspect left, GenericAspect right) { - DomainProperties leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DomainProperties.class - ); - - DomainProperties rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - 
DomainProperties.class - ); + DomainProperties leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DomainProperties.class); + + DomainProperties rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DomainProperties.class); // Omit timestamp comparison. return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 560a3865ce9e1a..6184760abfabda 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -15,49 +19,35 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.DomainKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; import com.linkedin.metadata.utils.GenericRecordUtils; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; - import java.util.HashMap; import java.util.Map; import 
java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.DOMAIN_PROPERTIES_ASPECT_NAME; -import static org.testng.Assert.*; - - public class CreateDomainResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id"); - private static final CreateDomainInput TEST_INPUT = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - TEST_PARENT_DOMAIN_URN.toString() - ); + private static final CreateDomainInput TEST_INPUT = + new CreateDomainInput( + "test-id", "test-name", "test-description", TEST_PARENT_DOMAIN_URN.toString()); - private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = new CreateDomainInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateDomainInput TEST_INPUT_NO_PARENT_DOMAIN = + new CreateDomainInput("test-id", "test-name", "test-description", null); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public void testGetSuccess() throws Exception { // Create resolver @@ -65,15 +55,13 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + 
.thenReturn(true); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -81,14 +69,17 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -107,11 +98,11 @@ public void testGetSuccess() throws Exception { proposal.setChangeType(ChangeType.UPSERT); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat(new CreateDomainProposalMatcher(proposal)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -120,24 +111,23 @@ public void testGetSuccessNoParentDomain() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - 
)).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_NO_PARENT_DOMAIN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); resolver.get(mockEnv).get(); @@ -154,11 +144,11 @@ public void testGetSuccessNoParentDomain() throws Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(props)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateDomainProposalMatcher(proposal)), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat(new CreateDomainProposalMatcher(proposal)), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -167,15 +157,13 @@ public void testGetInvalidParent() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - 
Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -191,31 +179,32 @@ public void testGetNameConflict() throws Exception { EntityService mockService = Mockito.mock(EntityService.class); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(false); + Mockito.when(mockClient.exists(Mockito.eq(TEST_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(false); - Mockito.when(mockClient.exists( - Mockito.eq(TEST_PARENT_DOMAIN_URN), - Mockito.any(Authentication.class) - )).thenReturn(true); + Mockito.when( + mockClient.exists( + Mockito.eq(TEST_PARENT_DOMAIN_URN), Mockito.any(Authentication.class))) + .thenReturn(true); QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new 
SearchEntity().setEntity(TEST_DOMAIN_URN)) - )); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + TEST_INPUT.getName(), TEST_PARENT_DOMAIN_URN)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray(new SearchEntity().setEntity(TEST_DOMAIN_URN)))); DomainProperties domainProperties = new DomainProperties(); domainProperties.setDescription(TEST_INPUT.getDescription()); @@ -225,18 +214,21 @@ public void testGetNameConflict() throws Exception { EntityResponse entityResponse = new EntityResponse(); EnvelopedAspectMap envelopedAspectMap = new EnvelopedAspectMap(); - envelopedAspectMap.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(domainProperties.data()))); + envelopedAspectMap.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(domainProperties.data()))); entityResponse.setAspects(envelopedAspectMap); Map entityResponseMap = new HashMap<>(); entityResponseMap.put(TEST_DOMAIN_URN, entityResponse); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class) - )).thenReturn(entityResponseMap); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class))) + .thenReturn(entityResponseMap); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } @@ -255,9 +247,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + 
Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -265,9 +256,9 @@ public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver @@ -278,4 +269,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java index 9bcdbe6d2a0e0a..5632654a26ad92 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DeleteDomainResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -10,10 +13,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteDomainResolverTest { private 
static final String TEST_URN = "urn:li:domain:test-id"; @@ -30,15 +29,21 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has 0 child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1), + Mockito.any())) .thenReturn(new SearchResult().setNumEntities(0)); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -53,14 +58,20 @@ public void testDeleteWithChildDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Domain has child domains - Mockito.when(mockClient.filter(Mockito.eq("domain"), Mockito.any(), Mockito.any(), Mockito.eq(0), Mockito.eq(1), Mockito.any())) + Mockito.when( + mockClient.filter( + Mockito.eq("domain"), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(1), + Mockito.any())) .thenReturn(new SearchResult().setNumEntities(1)); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -76,8 +87,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - 
Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java index 93fe3d00171606..9596abf55d04fc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/DomainEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -26,18 +29,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; -import static org.testng.Assert.*; - - public class DomainEntitiesResolverTest { - private static final DomainEntitiesInput TEST_INPUT = new DomainEntitiesInput( - null, - 0, - 20, - Collections.emptyList() - ); + private static final DomainEntitiesInput TEST_INPUT = + new DomainEntitiesInput(null, 0, 20, Collections.emptyList()); @Test public void testGetSuccess() throws Exception { @@ -47,35 +42,42 @@ public void testGetSuccess() throws Exception { final String childUrn = "urn:li:dataset:(test,test,test)"; final String domainUrn = "urn:li:domain:test-domain"; - final Criterion filterCriterion = new Criterion() - .setField("domains.keyword") - .setCondition(Condition.EQUAL) - .setValue(domainUrn); + final Criterion filterCriterion = + new Criterion() + .setField("domains.keyword") + 
.setCondition(Condition.EQUAL) + .setValue(domainUrn); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.eq(SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList())), - Mockito.eq("*"), - Mockito.eq( - new Filter().setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of(filterCriterion))) - )) - ), - Mockito.eq(0), - Mockito.eq(20), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - .setEntity(Urn.createFromString(childUrn)) - ))) - .setMetadata(new SearchResultMetadata().setAggregations(new AggregationMetadataArray())) - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.eq( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList())), + Mockito.eq("*"), + Mockito.eq( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray(ImmutableList.of(filterCriterion)))))), + Mockito.eq(0), + Mockito.eq(20), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(childUrn))))) + .setMetadata( + new SearchResultMetadata().setAggregations(new AggregationMetadataArray()))); DomainEntitiesResolver resolver = new DomainEntitiesResolver(mockClient); @@ -95,6 +97,7 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getSearchResults().size(), 1); - 
assertEquals(resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); + assertEquals( + resolver.get(mockEnv).get().getSearchResults().get(0).getEntity().getUrn(), childUrn); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java index bd8a8f98de4974..ffc3e823d83510 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ListDomainsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertEquals; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -20,46 +25,43 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; -import static org.testng.Assert.assertThrows; - - public class ListDomainsResolverTest { private static final Urn TEST_DOMAIN_URN = Urn.createFromTuple("domain", "test-id"); private static final Urn TEST_PARENT_DOMAIN_URN = Urn.createFromTuple("domain", "test-parent-id"); - private static final ListDomainsInput TEST_INPUT = new ListDomainsInput( - 0, 20, null, TEST_PARENT_DOMAIN_URN.toString() - ); + private static final ListDomainsInput TEST_INPUT = + new ListDomainsInput(0, 20, null, TEST_PARENT_DOMAIN_URN.toString()); - private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = new 
ListDomainsInput( - 0, 20, null, null - ); + private static final ListDomainsInput TEST_INPUT_NO_PARENT_DOMAIN = + new ListDomainsInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(TEST_PARENT_DOMAIN_URN)), + Mockito.eq( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -74,7 +76,8 @@ public void testGetSuccess() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + 
resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test @@ -82,22 +85,27 @@ public void testGetSuccessNoParentDomain() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(DomainUtils.buildParentDomainFilter(null)), - Mockito.eq(new SortCriterion().setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME).setOrder(SortOrder.DESCENDING)), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(DomainUtils.buildParentDomainFilter(null)), + Mockito.eq( + new SortCriterion() + .setField(DOMAIN_CREATED_TIME_INDEX_FIELD_NAME) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_DOMAIN_URN))))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); @@ -112,7 +120,8 @@ public void testGetSuccessNoParentDomain() throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getDomains().size(), 1); - assertEquals(resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getDomains().get(0).getUrn(), TEST_DOMAIN_URN.toString()); } @Test 
@@ -124,33 +133,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); ListDomainsResolver resolver = new ListDomainsResolver(mockClient); // Execute resolver @@ -161,4 +172,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java index 4059c180b0eb03..a0eff5d0574dbc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/MoveDomainResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.CorpuserUrn; @@ -17,52 +22,51 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class MoveDomainResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; private static final String PARENT_DOMAIN_URN = "urn:li:domain:00005397daf94708a8822b8106cfd451"; private static final String DOMAIN_URN = "urn:li:domain:11115397daf94708a8822b8106cfd451"; private static final MoveDomainInput INPUT = new MoveDomainInput(PARENT_DOMAIN_URN, DOMAIN_URN); - private static final MoveDomainInput INVALID_INPUT = new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); + private static final MoveDomainInput 
INVALID_INPUT = + new MoveDomainInput(CONTAINER_URN, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService, EntityClient mockClient) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(name, Urn.createFromString(PARENT_DOMAIN_URN))), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter( + name, Urn.createFromString(PARENT_DOMAIN_URN))), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(name); properties.setParentDomain(Urn.createFromString(PARENT_DOMAIN_URN)); - return 
MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); } @Test @@ -77,11 +81,11 @@ public void testGetSuccess() throws Exception { setupTests(mockEnv, mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false)); } @Test @@ -97,10 +101,9 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(null); MoveDomainResolver resolver = new MoveDomainResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java index 7bd7c3afac001c..4c8ceff9c4f80c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/ParentDomainsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.metadata.Constants.*; +import static 
org.testng.Assert.assertEquals; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -13,15 +16,11 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentDomainsResolverTest { @Test @@ -38,58 +37,68 @@ public void testGetSuccessForDomain() throws Exception { domainEntity.setType(EntityType.DOMAIN); Mockito.when(mockEnv.getSource()).thenReturn(domainEntity); - final DomainProperties parentDomain1 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:11115397daf94708a8822b8106cfd451") - ).setName("test def"); - final DomainProperties parentDomain2 = new DomainProperties().setParentDomain(Urn.createFromString( - "urn:li:domain:22225397daf94708a8822b8106cfd451") - ).setName("test def 2"); + final DomainProperties parentDomain1 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:11115397daf94708a8822b8106cfd451")) + .setName("test def"); + final DomainProperties parentDomain2 = + new DomainProperties() + .setParentDomain(Urn.createFromString("urn:li:domain:22225397daf94708a8822b8106cfd451")) + .setName("test def 2"); Map domainAspects = new HashMap<>(); - domainAspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); + domainAspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentDomain1.data()))); Map parentDomain1Aspects = new HashMap<>(); - parentDomain1Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new 
EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 1").setParentDomain(parentDomain2.getParentDomain()).data() - ))); + parentDomain1Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new DomainProperties() + .setName("domain parent 1") + .setParentDomain(parentDomain2.getParentDomain()) + .data()))); Map parentDomain2Aspects = new HashMap<>(); - parentDomain2Aspects.put(DOMAIN_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new DomainProperties().setName("domain parent 2").data() - ))); + parentDomain2Aspects.put( + DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new DomainProperties().setName("domain parent 2").data()))); - Mockito.when(mockClient.getV2( - Mockito.eq(domainUrn.getEntityType()), - Mockito.eq(domainUrn), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(domainUrn.getEntityType()), + Mockito.eq(domainUrn), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(domainAspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain1.getParentDomain().getEntityType()), - Mockito.eq(parentDomain1.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain1Aspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(parentDomain1.getParentDomain().getEntityType()), + Mockito.eq(parentDomain1.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new 
EnvelopedAspectMap(parentDomain1Aspects))); - Mockito.when(mockClient.getV2( - Mockito.eq(parentDomain2.getParentDomain().getEntityType()), - Mockito.eq(parentDomain2.getParentDomain()), - Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); + Mockito.when( + mockClient.getV2( + Mockito.eq(parentDomain2.getParentDomain().getEntityType()), + Mockito.eq(parentDomain2.getParentDomain()), + Mockito.eq(Collections.singleton(DOMAIN_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentDomain2Aspects))); ParentDomainsResolver resolver = new ParentDomainsResolver(mockClient); ParentDomainsResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(3)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(3)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getDomains().get(0).getUrn(), parentDomain1.getParentDomain().toString()); assertEquals(result.getDomains().get(1).getUrn(), parentDomain2.getParentDomain().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java index 92fb26288aa1dc..ad5ad2315ce43c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/SetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; 
+import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; private static final String TEST_NEW_DOMAIN_URN = "urn:li:domain:test-id-2"; @@ -43,16 +43,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + 
.setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -68,47 +71,52 @@ public void testGetSuccessNoExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = 
+ new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -124,23 +132,21 @@ public void testGetSuccessExistingDomains() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final Domains newDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); - final MetadataChangeProposal proposal = 
MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final Domains newDomains = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(Urn.createFromString(TEST_NEW_DOMAIN_URN)))); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_NEW_DOMAIN_URN))); } @Test @@ -149,16 +155,19 @@ public void testGetFailureDomainDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new 
HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -174,9 +183,8 @@ public void testGetFailureDomainDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -185,16 +193,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + 
Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -210,9 +221,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -230,18 +240,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetDomainResolver resolver = new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetDomainResolver resolver = + new SetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -252,4 +262,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java index decda39943dde3..7e6e2581688985 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/UnsetDomainResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,14 +30,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UnsetDomainResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_EXISTING_DOMAIN_URN = "urn:li:domain:test-id"; @Test @@ -42,16 +42,19 @@ public void testGetSuccessNoExistingDomains() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new 
HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -66,43 +69,46 @@ public void testGetSuccessNoExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), 
Mockito.any(Authentication.class), Mockito.eq(false)); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testGetSuccessExistingDomains() throws Exception { - Domains originalDomains = new Domains().setDomains(new UrnArray(ImmutableList.of(Urn.createFromString( - TEST_EXISTING_DOMAIN_URN)))); + Domains originalDomains = + new Domains() + .setDomains( + new UrnArray(ImmutableList.of(Urn.createFromString(TEST_EXISTING_DOMAIN_URN)))); // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(originalDomains.data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAINS_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(originalDomains.data()))))))); EntityService mockService = getMockEntityService(); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -117,18 +123,15 @@ public void testGetSuccessExistingDomains() throws Exception { resolver.get(mockEnv).get(); final Domains newDomains = new Domains().setDomains(new UrnArray(Collections.emptyList())); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - DOMAINS_ASPECT_NAME, newDomains); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), DOMAINS_ASPECT_NAME, newDomains); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -137,16 +140,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Test setting the domain - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(new 
HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.DOMAINS_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -160,9 +166,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -179,18 +184,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - UnsetDomainResolver resolver = new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + 
UnsetDomainResolver resolver = + new UnsetDomainResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -200,4 +205,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java index f1d44fcb472556..45a17744a26971 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/embed/UpdateEmbedResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.embed; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -25,29 +29,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpdateEmbedResolverTest { private static final String TEST_ENTITY_URN = "urn:li:dashboard:(looker,1)"; private static final String TEST_RENDER_URL = "https://www.google.com"; - private static final UpdateEmbedInput TEST_EMBED_INPUT = new UpdateEmbedInput( - TEST_ENTITY_URN, - TEST_RENDER_URL - ); + private static final UpdateEmbedInput TEST_EMBED_INPUT = + new UpdateEmbedInput(TEST_ENTITY_URN, TEST_RENDER_URL); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); 
@Test public void testGetSuccessNoExistingEmbed() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(EMBED_ASPECT_NAME), - Mockito.eq(0L))).thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(EMBED_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -62,14 +61,15 @@ public void testGetSuccessNoExistingEmbed() throws Exception { resolver.get(mockEnv).get(); final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - EMBED_ASPECT_NAME, newEmbed); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); - verifySingleIngestProposal(mockService, 1, proposal);; + verifySingleIngestProposal(mockService, 1, proposal); + ; - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -79,10 +79,12 @@ public void testGetSuccessExistingEmbed() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(EMBED_ASPECT_NAME), - Mockito.eq(0L))).thenReturn(originalEmbed); + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(EMBED_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalEmbed); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -97,14 +99,14 @@ 
public void testGetSuccessExistingEmbed() throws Exception { resolver.get(mockEnv).get(); final Embed newEmbed = new Embed().setRenderUrl(TEST_RENDER_URL); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - EMBED_ASPECT_NAME, newEmbed); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), EMBED_ASPECT_NAME, newEmbed); verifySingleIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -112,16 +114,19 @@ public void testGetFailureEntityDoesNotExist() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.DASHBOARD_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DASHBOARD_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(EMBED_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.DASHBOARD_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects(new EnvelopedAspectMap(Collections.emptyMap())))); EntityService mockService = 
getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -136,11 +141,10 @@ public void testGetFailureEntityDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - );; + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + ; } @Test @@ -156,20 +160,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), - Mockito.eq(false) - ); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateEmbedResolver resolver = new UpdateEmbedResolver(mockService); // Execute resolver @@ -180,4 +182,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java index cde2739b2bcc65..fa8b1d6a747ca2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityExistsResolverTest.java @@ -1,14 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class EntityExistsResolverTest { private static final String ENTITY_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java index 913ea4602faf00..d9d5e643057cea 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/entity/EntityPrivilegesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.entity; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Chart; @@ -14,14 +17,10 @@ import com.linkedin.metadata.Constants; import com.linkedin.r2.RemoteInvocationException; import 
graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class EntityPrivilegesResolverTest { final String glossaryTermUrn = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; @@ -29,7 +28,8 @@ public class EntityPrivilegesResolverTest { final String datasetUrn = "urn:li:dataset:(urn:li:dataPlatform:kafka,protobuf.MessageA,TEST)"; final String chartUrn = "urn:li:chart:(looker,baz1)"; final String dashboardUrn = "urn:li:dashboard:(looker,dashboards.1)"; - final String dataJobUrn = "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; + final String dataJobUrn = + "urn:li:dataJob:(urn:li:dataFlow:(spark,test_machine.sparkTestApp,local),QueryExecId_31)"; private DataFetchingEnvironment setUpTestWithPermissions(Entity entity) { QueryContext mockContext = getMockAllowContext(); @@ -115,11 +115,13 @@ public void testGetFailure() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); DataFetchingEnvironment mockEnv = setUpTestWithoutPermissions(glossaryNode); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class)); EntityPrivilegesResolver resolver = new EntityPrivilegesResolver(mockClient); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 26c13186c4a817..287d270ab569ca 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -10,15 +14,9 @@ import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.ExecutionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; -import static org.testng.Assert.*; - - public class AddRelatedTermsResolverTest { private static final String TEST_ENTITY_URN = "urn:li:glossaryTerm:test-id-0"; @@ -28,10 +26,11 @@ public class AddRelatedTermsResolverTest { private EntityService setUpService() { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); return mockService; } @@ -48,24 +47,22 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test @@ -80,24 +77,22 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - 
Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test @@ -110,9 +105,9 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_ENTITY_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_ENTITY_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -130,9 +125,9 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - DATASET_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(DATASET_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -151,9 +146,9 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws 
Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -172,9 +167,9 @@ public void testGetFailAddToNonExistentUrn() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -193,9 +188,9 @@ public void testGetFailAddToNonTerm() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(DATASET_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + DATASET_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -215,15 +210,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = 
new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); } - } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 3b47514d87181b..2a36d77716ab75 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,39 +11,27 @@ import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; -import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.entity.EntityService; +import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; 
-import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static com.linkedin.metadata.Constants.*; - - public class CreateGlossaryNodeResolverTest { - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -47,8 +39,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -63,8 +55,8 @@ private MetadataChangeProposal 
setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_NODE_ENTITY_NAME, - GLOSSARY_NODE_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_NODE_ENTITY_NAME, GLOSSARY_NODE_INFO_ASPECT_NAME, props); } @Test @@ -72,16 +64,14 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -89,16 +79,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - 
Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,15 +94,13 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index 2dbe637d160572..6653b19d6ef2bd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -1,61 +1,53 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static com.linkedin.metadata.Constants.*; +import static 
org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.search.SearchEntity; import com.linkedin.metadata.search.SearchEntityArray; import com.linkedin.metadata.search.SearchResult; -import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static com.linkedin.metadata.Constants.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class CreateGlossaryTermResolverTest { private static final String EXISTING_TERM_URN = "urn:li:glossaryTerm:testing12345"; - private static final CreateGlossaryEntityInput TEST_INPUT = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - private 
static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = new CreateGlossaryEntityInput( - "test-id", - "test-name", - null, - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); - - private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = new CreateGlossaryEntityInput( - "test-id", - "test-name", - "test-description", - null - ); + private static final CreateGlossaryEntityInput TEST_INPUT = + new CreateGlossaryEntityInput( + "test-id", + "test-name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + private static final CreateGlossaryEntityInput TEST_INPUT_NO_DESCRIPTION = + new CreateGlossaryEntityInput( + "test-id", "test-name", null, "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); + + private static final CreateGlossaryEntityInput TEST_INPUT_NO_PARENT_NODE = + new CreateGlossaryEntityInput("test-id", "test-name", "test-description", null); private final String parentNodeUrn = "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"; @@ -63,8 +55,8 @@ private MetadataChangeProposal setupTest( DataFetchingEnvironment mockEnv, CreateGlossaryEntityInput input, String description, - String parentNode - ) throws Exception { + String parentNode) + throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); @@ -80,8 +72,8 @@ private MetadataChangeProposal setupTest( final GlossaryNodeUrn parent = GlossaryNodeUrn.createFromString(parentNode); props.setParentNode(parent); } - return MutationUtils.buildMetadataChangeProposalWithKey(key, GLOSSARY_TERM_ENTITY_NAME, - GLOSSARY_TERM_INFO_ASPECT_NAME, props); + return MutationUtils.buildMetadataChangeProposalWithKey( + key, GLOSSARY_TERM_ENTITY_NAME, GLOSSARY_TERM_INFO_ASPECT_NAME, props); } @Test @@ -89,16 +81,14 @@ public void testGetSuccess() throws Exception { EntityClient 
mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -106,16 +96,14 @@ public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -123,16 +111,14 @@ public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = initMockClient(); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - final MetadataChangeProposal proposal 
= setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); + final MetadataChangeProposal proposal = + setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -140,73 +126,71 @@ public void testGetFailureExistingTermSameName() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities( - new SearchEntityArray(new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))) - )); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + new SearchEntity().setEntity(UrnUtils.getUrn(EXISTING_TERM_URN))))); Map result = new HashMap<>(); EnvelopedAspectMap map = new EnvelopedAspectMap(); GlossaryTermInfo termInfo = new GlossaryTermInfo().setName("Duplicated Name"); - map.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); + map.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(termInfo.data()))); result.put(UrnUtils.getUrn(EXISTING_TERM_URN), new EntityResponse().setAspects(map)); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - 
Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(result); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(result); EntityService mockService = getMockEntityService(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - CreateGlossaryEntityInput input = new CreateGlossaryEntityInput( - "test-id", - "Duplicated Name", - "test-description", - "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b" - ); + CreateGlossaryEntityInput input = + new CreateGlossaryEntityInput( + "test-id", + "Duplicated Name", + "test-description", + "urn:li:glossaryNode:12372c2ec7754c308993202dc44f548b"); setupTest(mockEnv, input, "test-description", parentNodeUrn); CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private EntityClient initMockClient() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when( - mockClient.filter( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(1000), - Mockito.any() - ) - ).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + mockClient.filter( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(1000), + Mockito.any())) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); Mockito.when( - mockClient.batchGetV2( - Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), - Mockito.any(), - 
Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any() - ) - ).thenReturn(new HashMap<>()); + mockClient.batchGetV2( + Mockito.eq(GLOSSARY_TERM_ENTITY_NAME), + Mockito.any(), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any())) + .thenReturn(new HashMap<>()); return mockClient; } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java index 94f0d0b7a11434..7229d2acf763d5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/DeleteGlossaryEntityResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -7,19 +12,14 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockEntityService; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class DeleteGlossaryEntityResolverTest { - private static 
final String TEST_TERM_URN = "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; + private static final String TEST_TERM_URN = + "urn:li:glossaryTerm:12372c2ec7754c308993202dc44f548b"; @Test public void testGetSuccess() throws Exception { @@ -33,26 +33,27 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_TERM_URN); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_TERM_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_TERM_URN)), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_URN))).thenReturn(true); - DeleteGlossaryEntityResolver resolver = new DeleteGlossaryEntityResolver(mockClient, mockService); + DeleteGlossaryEntityResolver resolver = + new DeleteGlossaryEntityResolver(mockClient, mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java index 677516e9404e8a..b879baf1e65dcd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryNodesResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,16 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryNodesResolverTest { - final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput( - 0, 100 - ); + final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryNodeUrn1 = "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451"; final String glossaryNodeUrn2 = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -42,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - 
.setEntity(Urn.createFromString(glossaryNodeUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryNodeUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryNodeUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryNodesResolver resolver = new GetRootGlossaryNodesResolver(mockClient); GetRootGlossaryNodesResult result = resolver.get(mockEnv).get(); @@ -64,24 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); - assertEquals(result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); + assertEquals( + result.getNodes().get(0).getUrn(), Urn.createFromString(glossaryNodeUrn1).toString()); + assertEquals( + result.getNodes().get(1).getUrn(), Urn.createFromString(glossaryNodeUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new 
ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java index 5aba32108b7db0..201bea752d53f0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GetRootGlossaryTermsResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -22,14 +24,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class GetRootGlossaryTermsResolverTest { final GetRootGlossaryEntitiesInput testInput = new GetRootGlossaryEntitiesInput(0, 100); final String glossaryTermUrn1 = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; final String glossaryTermUrn2 = "urn:li:glossaryTerm:22225397daf94708a8822b8106cfd451"; - @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); @@ -40,21 +39,23 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); Mockito.when(mockEnv.getArgument("input")).thenReturn(testInput); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), - Mockito.eq(buildGlossaryEntitiesFilter()), - Mockito.eq(null), - Mockito.eq(0), - Mockito.eq(100), - Mockito.any(Authentication.class) - )).thenReturn( - new SearchResult().setEntities(new SearchEntityArray(ImmutableSet.of( - new SearchEntity() - 
.setEntity(Urn.createFromString(glossaryTermUrn1)), - new SearchEntity() - .setEntity(Urn.createFromString(glossaryTermUrn2)) - ))).setFrom(0).setNumEntities(2) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.GLOSSARY_TERM_ENTITY_NAME), + Mockito.eq(buildGlossaryEntitiesFilter()), + Mockito.eq(null), + Mockito.eq(0), + Mockito.eq(100), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setEntities( + new SearchEntityArray( + ImmutableSet.of( + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn1)), + new SearchEntity().setEntity(Urn.createFromString(glossaryTermUrn2))))) + .setFrom(0) + .setNumEntities(2)); GetRootGlossaryTermsResolver resolver = new GetRootGlossaryTermsResolver(mockClient); GetRootGlossaryTermsResult result = resolver.get(mockEnv).get(); @@ -62,23 +63,23 @@ public void testGetSuccess() throws Exception { assertEquals(result.getCount(), 2); assertEquals(result.getStart(), 0); assertEquals(result.getTotal(), 2); - assertEquals(result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); - assertEquals(result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); + assertEquals( + result.getTerms().get(0).getUrn(), Urn.createFromString(glossaryTermUrn1).toString()); + assertEquals( + result.getTerms().get(1).getUrn(), Urn.createFromString(glossaryTermUrn2).toString()); } private Filter buildGlossaryEntitiesFilter() { - CriterionArray array = new CriterionArray( - ImmutableList.of( - new Criterion() - .setField("hasParentNode") - .setValue("false") - .setCondition(Condition.EQUAL) - )); + CriterionArray array = + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("hasParentNode") + .setValue("false") + .setCondition(Condition.EQUAL))); final Filter filter = new Filter(); - filter.setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(array) - ))); + filter.setOr( + new 
ConjunctiveCriterionArray(ImmutableList.of(new ConjunctiveCriterion().setAnd(array)))); return filter; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java index 8bfc32e1999ae2..969fda541d6a66 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/GlossaryUtilsTest.java @@ -1,33 +1,32 @@ package com.linkedin.datahub.graphql.resolvers.glossary; -import com.google.common.collect.ImmutableSet; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; -import com.datahub.plugins.auth.authorization.Authorizer; import com.datahub.authorization.EntitySpec; +import com.datahub.plugins.auth.authorization.Authorizer; +import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.resolvers.mutate.util.GlossaryUtils; import com.linkedin.entity.Aspect; -import com.linkedin.entity.client.EntityClient; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.metadata.Constants; +import java.util.HashMap; +import java.util.Map; +import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.Optional; -import 
java.util.Map; -import java.util.HashMap; - -import static org.testng.Assert.*; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; - public class GlossaryUtilsTest { private final String userUrn = "urn:li:corpuser:authorized"; @@ -44,67 +43,87 @@ private void setUpTests() throws Exception { Mockito.when(mockContext.getActorUrn()).thenReturn(userUrn); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node2") - ); - GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:parent_node3") - ); - + GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node2")); + GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode(GlossaryNodeUrn.createFromString("urn:li:glossaryNode:parent_node3")); + GlossaryNodeInfo parentNode3 = new GlossaryNodeInfo(); - + Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode1.getParentNode()).data() - ))); - + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode1.getParentNode()) + .data()))); + Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new 
GlossaryNodeInfo() + .setDefinition("node parent 2") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode3Aspects = new HashMap<>(); - parentNode3Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 3").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn1), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn2), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), - Mockito.eq(parentNodeUrn3), - Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); - - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + parentNode3Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 3").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn1), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn2), + 
Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(Constants.GLOSSARY_NODE_ENTITY_NAME), + Mockito.eq(parentNodeUrn3), + Mockito.eq(ImmutableSet.of(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode3Aspects))); + + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); } - private void mockAuthRequest(String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { - final AuthorizationRequest authorizationRequest = new AuthorizationRequest( - userUrn, - privilege, - resourceSpec != null ? Optional.of(resourceSpec) : Optional.empty() - ); + private void mockAuthRequest( + String privilege, AuthorizationResult.Type allowOrDeny, EntitySpec resourceSpec) { + final AuthorizationRequest authorizationRequest = + new AuthorizationRequest( + userUrn, + privilege, + resourceSpec != null ? 
Optional.of(resourceSpec) : Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(allowOrDeny); Mockito.when(mockAuthorizer.authorize(Mockito.eq(authorizationRequest))).thenReturn(result); @@ -150,7 +169,8 @@ public void testCanManageChildrenEntitiesAuthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn, mockClient)); @@ -162,7 +182,8 @@ public void testCanManageChildrenEntitiesUnauthorized() throws Exception { // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); + final EntitySpec resourceSpec = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn.toString()); mockAuthRequest("MANAGE_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec); @@ -175,13 +196,16 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorized() throws Exceptio // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); 
mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -193,13 +217,16 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorized() throws Except // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, 
resourceSpec1); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -211,10 +238,12 @@ public void testCanManageChildrenRecursivelyEntitiesAuthorizedLevel2() throws Ex // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.ALLOW, resourceSpec2); - final EntitySpec resourceSpec1 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); + final EntitySpec resourceSpec1 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn1.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec1); assertTrue(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn1, mockClient)); @@ -226,10 +255,12 @@ public void testCanManageChildrenRecursivelyEntitiesUnauthorizedLevel2() throws // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); - final EntitySpec resourceSpec2 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); + final EntitySpec resourceSpec2 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn2.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec2); 
assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn2, mockClient)); @@ -241,7 +272,8 @@ public void testCanManageChildrenRecursivelyEntitiesNoLevel2() throws Exception // they do NOT have the MANAGE_GLOSSARIES platform privilege mockAuthRequest("MANAGE_GLOSSARIES", AuthorizationResult.Type.DENY, null); - final EntitySpec resourceSpec3 = new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); + final EntitySpec resourceSpec3 = + new EntitySpec(parentNodeUrn.getEntityType(), parentNodeUrn3.toString()); mockAuthRequest("MANAGE_ALL_GLOSSARY_CHILDREN", AuthorizationResult.Type.DENY, resourceSpec3); assertFalse(GlossaryUtils.canManageChildrenEntities(mockContext, parentNodeUrn3, mockClient)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java index 06dff7611fac86..446f58bec73aa1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/ParentNodesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; +import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.GlossaryNodeUrn; import com.linkedin.common.urn.Urn; @@ -16,17 +21,11 @@ import com.linkedin.glossary.GlossaryNodeInfo; import com.linkedin.glossary.GlossaryTermInfo; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Collections; 
import java.util.HashMap; import java.util.Map; - -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_ENTITY_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_NODE_INFO_ASPECT_NAME; -import static com.linkedin.metadata.Constants.GLOSSARY_TERM_INFO_ASPECT_NAME; -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class ParentNodesResolverTest { @Test @@ -43,76 +42,94 @@ public void testGetSuccessForTerm() throws Exception { termEntity.setType(EntityType.GLOSSARY_TERM); Mockito.when(mockEnv.getSource()).thenReturn(termEntity); - final GlossaryTermInfo parentNode1 = new GlossaryTermInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryTermInfo parentNode1 = + new GlossaryTermInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map glossaryTermAspects = new HashMap<>(); - glossaryTermAspects.put(GLOSSARY_TERM_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryTermAspects.put( + GLOSSARY_TERM_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( 
+ GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(termUrn.getEntityType()), - Mockito.eq(termUrn), - Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - 
Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(termUrn.getEntityType()), + Mockito.eq(termUrn), + Mockito.eq(Collections.singleton(GLOSSARY_TERM_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryTermAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + 
Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); @@ -132,78 +149,96 @@ public void testGetSuccessForNode() throws Exception { nodeEntity.setType(EntityType.GLOSSARY_NODE); Mockito.when(mockEnv.getSource()).thenReturn(nodeEntity); - final GlossaryNodeInfo parentNode1 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451") - ).setDefinition("test def"); - final GlossaryNodeInfo parentNode2 = new GlossaryNodeInfo().setParentNode(GlossaryNodeUrn.createFromString( - "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451") - ).setDefinition("test def 2"); + final GlossaryNodeInfo parentNode1 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:11115397daf94708a8822b8106cfd451")) + .setDefinition("test def"); + final GlossaryNodeInfo parentNode2 = + new GlossaryNodeInfo() + .setParentNode( + GlossaryNodeUrn.createFromString( + "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451")) + .setDefinition("test def 2"); Map glossaryNodeAspects = new HashMap<>(); - glossaryNodeAspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); + glossaryNodeAspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(parentNode1.data()))); 
Map parentNode1Aspects = new HashMap<>(); - parentNode1Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 1").setParentNode(parentNode2.getParentNode()).data() - ))); + parentNode1Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect( + new GlossaryNodeInfo() + .setDefinition("node parent 1") + .setParentNode(parentNode2.getParentNode()) + .data()))); Map parentNode2Aspects = new HashMap<>(); - parentNode2Aspects.put(GLOSSARY_NODE_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( - new GlossaryNodeInfo().setDefinition("node parent 2").data() - ))); - - Mockito.when(mockClient.getV2( - Mockito.eq(nodeUrn.getEntityType()), - Mockito.eq(nodeUrn), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - .setUrn(parentNode1.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode1.getParentNode().getEntityType()), - Mockito.eq(parentNode1.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setEntityName(GLOSSARY_NODE_ENTITY_NAME) - 
.setUrn(parentNode2.getParentNode()) - .setAspects(new EnvelopedAspectMap(parentNode2Aspects))); - - Mockito.when(mockClient.getV2( - Mockito.eq(parentNode2.getParentNode().getEntityType()), - Mockito.eq(parentNode2.getParentNode()), - Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); + parentNode2Aspects.put( + GLOSSARY_NODE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(new GlossaryNodeInfo().setDefinition("node parent 2").data()))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(nodeUrn.getEntityType()), + Mockito.eq(nodeUrn), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(glossaryNodeAspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode1.getParentNode()) + .setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode1.getParentNode().getEntityType()), + Mockito.eq(parentNode1.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode1Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setEntityName(GLOSSARY_NODE_ENTITY_NAME) + .setUrn(parentNode2.getParentNode()) + .setAspects(new 
EnvelopedAspectMap(parentNode2Aspects))); + + Mockito.when( + mockClient.getV2( + Mockito.eq(parentNode2.getParentNode().getEntityType()), + Mockito.eq(parentNode2.getParentNode()), + Mockito.eq(Collections.singleton(GLOSSARY_NODE_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn(new EntityResponse().setAspects(new EnvelopedAspectMap(parentNode2Aspects))); ParentNodesResolver resolver = new ParentNodesResolver(mockClient); ParentNodesResult result = resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(5)).getV2( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any() - ); + Mockito.verify(mockClient, Mockito.times(5)) + .getV2(Mockito.any(), Mockito.any(), Mockito.any(), Mockito.any()); assertEquals(result.getCount(), 2); assertEquals(result.getNodes().get(0).getUrn(), parentNode1.getParentNode().toString()); assertEquals(result.getNodes().get(1).getUrn(), parentNode2.getParentNode().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 3906d1188cb172..47de668b2c9dc3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.google.common.collect.ImmutableList; import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; @@ -12,15 +16,10 @@ import com.linkedin.metadata.Constants; import 
com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.Arrays; import java.util.concurrent.ExecutionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class RemoveRelatedTermsResolverTest { @@ -35,10 +34,11 @@ public void testGetSuccessIsA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -47,17 +47,16 @@ public void testGetSuccessIsA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - 
Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -67,10 +66,11 @@ public void testGetSuccessHasA() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setHasRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -79,26 +79,26 @@ public void testGetSuccessHasA() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifySingleIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test public void testFailAspectDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -107,9 +107,9 @@ public void testFailAspectDoesNotExist() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.hasA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.hasA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -124,10 +124,11 @@ public void testFailNoPermissions() throws Exception { final GlossaryRelatedTerms relatedTerms = new GlossaryRelatedTerms(); relatedTerms.setIsRelatedTerms(new GlossaryTermUrnArray(Arrays.asList(term1Urn, term2Urn))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_RELATED_TERM_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(relatedTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -136,16 +137,15 @@ public void testFailNoPermissions() throws Exception { QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - RelatedTermsInput input = new 
RelatedTermsInput(TEST_ENTITY_URN, ImmutableList.of( - TEST_TERM_1_URN - ), TermRelationshipType.isA); + RelatedTermsInput input = + new RelatedTermsInput( + TEST_ENTITY_URN, ImmutableList.of(TEST_TERM_1_URN), TermRelationshipType.isA); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); verifyNoIngestProposal(mockService); - Mockito.verify(mockService, Mockito.times(0)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(0)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index eee9cfbae8fcb2..3972715fcefb17 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; @@ -19,16 +23,10 @@ import com.linkedin.metadata.search.SearchResult; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static 
com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateNameResolverTest { private static final String NEW_NAME = "New Name"; @@ -40,23 +38,23 @@ public class UpdateNameResolverTest { private static final UpdateNameInput INPUT_FOR_DOMAIN = new UpdateNameInput(NEW_NAME, DOMAIN_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(NEW_NAME); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -88,16 +86,16 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + 
Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(NEW_NAME); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); assertTrue(resolver.get(mockEnv).get()); @@ -118,25 +116,27 @@ public void testGetSuccessForDomain() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(DOMAIN_URN), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(DOMAIN_URN), Constants.DOMAIN_PROPERTIES_ASPECT_NAME, 0)) .thenReturn(new DomainProperties().setName(name)); - Mockito.when(mockClient.filter( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), - Mockito.eq(null), - Mockito.any(Integer.class), - Mockito.any(Integer.class), - Mockito.any(Authentication.class) - )).thenReturn(new SearchResult().setEntities(new SearchEntityArray())); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq( + DomainUtils.buildNameAndParentDomainFilter(INPUT_FOR_DOMAIN.getName(), null)), + Mockito.eq(null), + Mockito.any(Integer.class), + Mockito.any(Integer.class), + Mockito.any(Authentication.class))) + .thenReturn(new SearchResult().setEntities(new SearchEntityArray())); DomainProperties properties = new DomainProperties(); properties.setName(NEW_NAME); - final MetadataChangeProposal proposal = 
MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(DOMAIN_URN), - DOMAIN_PROPERTIES_ASPECT_NAME, properties); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(DOMAIN_URN), DOMAIN_PROPERTIES_ASPECT_NAME, properties); UpdateNameResolver resolver = new UpdateNameResolver(mockService, mockClient); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index a78c28890fecf3..74a59b10a40b01 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.glossary; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.assertThrows; +import static org.testng.Assert.assertTrue; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.urn.GlossaryNodeUrn; @@ -15,45 +20,43 @@ import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; import org.mockito.Mockito; import org.testng.annotations.Test; -import java.net.URISyntaxException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.assertThrows; -import static org.testng.Assert.assertTrue; - public class UpdateParentNodeResolverTest { private static final String CONTAINER_URN = "urn:li:container:00005397daf94708a8822b8106cfd451"; - private 
static final String PARENT_NODE_URN = "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; + private static final String PARENT_NODE_URN = + "urn:li:glossaryNode:00005397daf94708a8822b8106cfd451"; private static final String TERM_URN = "urn:li:glossaryTerm:11115397daf94708a8822b8106cfd451"; private static final String NODE_URN = "urn:li:glossaryNode:22225397daf94708a8822b8106cfd451"; - private static final UpdateParentNodeInput INPUT = new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); - private static final UpdateParentNodeInput INPUT_WITH_NODE = new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); - private static final UpdateParentNodeInput INVALID_INPUT = new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT = + new UpdateParentNodeInput(PARENT_NODE_URN, TERM_URN); + private static final UpdateParentNodeInput INPUT_WITH_NODE = + new UpdateParentNodeInput(PARENT_NODE_URN, NODE_URN); + private static final UpdateParentNodeInput INVALID_INPUT = + new UpdateParentNodeInput(CONTAINER_URN, TERM_URN); private static final CorpuserUrn TEST_ACTOR_URN = new CorpuserUrn("test"); - private MetadataChangeProposal setupTests(DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { + private MetadataChangeProposal setupTests( + DataFetchingEnvironment mockEnv, EntityService mockService) throws Exception { QueryContext mockContext = getMockAllowContext(); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(TERM_URN), - Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, - 0)) + Mockito.when( + mockService.getAspect( + Urn.createFromString(TERM_URN), Constants.GLOSSARY_TERM_INFO_ASPECT_NAME, 0)) .thenReturn(new 
GlossaryTermInfo().setName(name)); GlossaryTermInfo info = new GlossaryTermInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - return MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TERM_URN), - GLOSSARY_TERM_INFO_ASPECT_NAME, info); + return MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TERM_URN), GLOSSARY_TERM_INFO_ASPECT_NAME, info); } @Test @@ -61,7 +64,8 @@ public void testGetSuccess() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -77,7 +81,8 @@ public void testGetSuccessForNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(NODE_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT_WITH_NODE); @@ -87,17 +92,17 @@ public void testGetSuccessForNode() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); final String name = "test name"; - Mockito.when(mockService.getAspect( - Urn.createFromString(NODE_URN), - Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, - 0)) + Mockito.when( + 
mockService.getAspect( + Urn.createFromString(NODE_URN), Constants.GLOSSARY_NODE_INFO_ASPECT_NAME, 0)) .thenReturn(new GlossaryNodeInfo().setName(name)); GlossaryNodeInfo info = new GlossaryNodeInfo(); info.setName(name); info.setParentNode(GlossaryNodeUrn.createFromString(PARENT_NODE_URN)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(NODE_URN), - GLOSSARY_NODE_INFO_ASPECT_NAME, info); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(NODE_URN), GLOSSARY_NODE_INFO_ASPECT_NAME, info); UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService, mockClient); @@ -110,7 +115,8 @@ public void testGetFailureEntityDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(false); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -126,7 +132,8 @@ public void testGetFailureNodeDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(false); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(false); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INPUT); @@ -142,7 +149,8 
@@ public void testGetFailureParentIsNotNode() throws Exception { EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockService.exists(Urn.createFromString(TERM_URN))).thenReturn(true); - Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))).thenReturn(true); + Mockito.when(mockService.exists(GlossaryNodeUrn.createFromString(PARENT_NODE_URN))) + .thenReturn(true); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); Mockito.when(mockEnv.getArgument("input")).thenReturn(INVALID_INPUT); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java index a20c84d11ba9fd..19d9dd20d3f805 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/AddGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AddGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java index 876de633bd6562..a29680a6de52de 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/CreateGroupResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.group.GroupService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateGroupResolverTest { private static final String GROUP_ID = "id"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java index 73b0be96fce176..601d5e08a42330 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/group/RemoveGroupMembersResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.group; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import 
com.datahub.authentication.group.GroupService; import com.linkedin.common.Origin; @@ -13,11 +17,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class RemoveGroupMembersResolverTest { private static final String GROUP_URN_STRING = "urn:li:corpGroup:testNewGroup"; private static final String USER_URN_STRING = "urn:li:corpuser:test"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java index dae0758f6a2f65..e5cb43c4dab617 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestTestUtils.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -21,15 +23,14 @@ import com.linkedin.secret.DataHubSecretValue; import org.mockito.Mockito; -import static org.testng.Assert.*; - - public class IngestTestUtils { - public static final Urn TEST_INGESTION_SOURCE_URN = Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); - public static final Urn TEST_SECRET_URN = Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); - public static final Urn TEST_EXECUTION_REQUEST_URN = Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); - + public static final Urn TEST_INGESTION_SOURCE_URN = + Urn.createFromTuple(Constants.INGESTION_SOURCE_ENTITY_NAME, "test"); + public static final Urn TEST_SECRET_URN = + 
Urn.createFromTuple(Constants.SECRETS_ENTITY_NAME, "TEST_SECRET"); + public static final Urn TEST_EXECUTION_REQUEST_URN = + Urn.createFromTuple(Constants.EXECUTION_REQUEST_ENTITY_NAME, "1234"); public static QueryContext getMockAllowContext() { QueryContext mockContext = Mockito.mock(QueryContext.class); @@ -63,8 +64,13 @@ public static DataHubIngestionSourceInfo getTestIngestionSourceInfo() { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setName("My Test Source"); info.setType("mysql"); - info.setSchedule(new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); - info.setConfig(new DataHubIngestionSourceConfig().setVersion("0.8.18").setRecipe("{}").setExecutorId("executor id")); + info.setSchedule( + new DataHubIngestionSourceSchedule().setTimezone("UTC").setInterval("* * * * *")); + info.setConfig( + new DataHubIngestionSourceConfig() + .setVersion("0.8.18") + .setRecipe("{}") + .setExecutorId("executor id")); return info; } @@ -78,15 +84,18 @@ public static DataHubSecretValue getTestSecretValue() { public static ExecutionRequestInput getTestExecutionRequestInput() { ExecutionRequestInput input = new ExecutionRequestInput(); - input.setArgs(new StringMap( - ImmutableMap.of( - "recipe", "my-custom-recipe", - "version", "0.8.18") - )); + input.setArgs( + new StringMap( + ImmutableMap.of( + "recipe", "my-custom-recipe", + "version", "0.8.18"))); input.setTask("RUN_INGEST"); input.setExecutorId("default"); input.setRequestedAt(0L); - input.setSource(new ExecutionRequestSource().setIngestionSource(TEST_INGESTION_SOURCE_URN).setType("SCHEDULED_INGESTION")); + input.setSource( + new ExecutionRequestSource() + .setIngestionSource(TEST_INGESTION_SOURCE_URN) + .setType("SCHEDULED_INGESTION")); return input; } @@ -99,7 +108,8 @@ public static ExecutionRequestResult getTestExecutionRequestResult() { return result; } - public static void verifyTestIngestionSourceGraphQL(IngestionSource ingestionSource, 
DataHubIngestionSourceInfo info) { + public static void verifyTestIngestionSourceGraphQL( + IngestionSource ingestionSource, DataHubIngestionSourceInfo info) { assertEquals(ingestionSource.getUrn(), TEST_INGESTION_SOURCE_URN.toString()); assertEquals(ingestionSource.getName(), info.getName()); assertEquals(ingestionSource.getType(), info.getType()); @@ -134,5 +144,5 @@ public static void verifyTestExecutionRequest( assertEquals(executionRequest.getResult().getStartTimeMs(), result.getStartTimeMs()); } - private IngestTestUtils() { } + private IngestTestUtils() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java index 12045b93614691..3de88333b959d1 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/IngestionAuthUtilsTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest; +import static org.testng.Assert.*; + import com.datahub.authorization.AuthorizationRequest; import com.datahub.authorization.AuthorizationResult; import com.datahub.plugins.auth.authorization.Authorizer; @@ -7,7 +9,6 @@ import java.util.Optional; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; public class IngestionAuthUtilsTest { @@ -16,11 +17,9 @@ public void testCanManageIngestionAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:authorized", "MANAGE_INGESTION", 
Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -37,11 +36,9 @@ public void testCanManageIngestionUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_INGESTION", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_INGESTION", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); @@ -58,11 +55,8 @@ public void testCanManageSecretsAuthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:authorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest("urn:li:corpuser:authorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.ALLOW); @@ -79,11 +73,9 @@ public void testCanManageSecretsUnauthorized() throws Exception { QueryContext mockContext = Mockito.mock(QueryContext.class); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest request = new AuthorizationRequest( - "urn:li:corpuser:unauthorized", - "MANAGE_SECRETS", - Optional.empty() - ); + AuthorizationRequest request = + new AuthorizationRequest( + "urn:li:corpuser:unauthorized", "MANAGE_SECRETS", Optional.empty()); AuthorizationResult result = Mockito.mock(AuthorizationResult.class); 
Mockito.when(result.getType()).thenReturn(AuthorizationResult.Type.DENY); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java index e7226c6e4db080..3d0c24b9aa0222 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CancelIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -18,35 +21,36 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CancelIngestionExecutionRequestResolverTest { - private static final CancelIngestionExecutionRequestInput TEST_INPUT = new CancelIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString(), - TEST_EXECUTION_REQUEST_URN.toString() - ); + private static final CancelIngestionExecutionRequestInput TEST_INPUT = + new CancelIngestionExecutionRequestInput( + TEST_INGESTION_SOURCE_URN.toString(), TEST_EXECUTION_REQUEST_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new 
HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -57,18 +61,19 @@ Constants.INGESTION_INFO_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect( resolver.get(mockEnv).get(); // Verify ingest proposal has been called to create a Signal request. 
- Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -77,19 +82,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - CancelIngestionExecutionRequestResolver resolver = new CancelIngestionExecutionRequestResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + CancelIngestionExecutionRequestResolver resolver = + new CancelIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff 
--git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java index 7973e49c6efdf8..18ce1d8c27955c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateIngestionExecutionRequestResolverTest.java @@ -1,7 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.datahub.graphql.QueryContext; @@ -12,6 +14,7 @@ import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -19,35 +22,37 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateIngestionExecutionRequestResolverTest { - private static final CreateIngestionExecutionRequestInput TEST_INPUT = new CreateIngestionExecutionRequestInput( - TEST_INGESTION_SOURCE_URN.toString() - ); + private static final CreateIngestionExecutionRequestInput TEST_INPUT = + new 
CreateIngestionExecutionRequestInput(TEST_INGESTION_SOURCE_URN.toString()); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_INGESTION_SOURCE_URN, - new EntityResponse().setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(getTestIngestionSourceInfo().data())) - ))))); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(getTestIngestionSourceInfo().data()))))))); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -58,11 +63,11 @@ public void 
testGetSuccess() throws Exception { resolver.get(mockEnv).get(); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -71,7 +76,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -80,21 +86,21 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), 
Mockito.any(Authentication.class)); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateIngestionExecutionRequestResolver resolver = new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); + CreateIngestionExecutionRequestResolver resolver = + new CreateIngestionExecutionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -105,4 +111,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java index 75df2404419658..eaf3186524721c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/CreateTestConnectionRequestResolverTest.java @@ -1,25 +1,22 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; -import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestConnectionRequestInput; import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.config.IngestionConfiguration; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import 
org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestConnectionRequestResolverTest { - private static final CreateTestConnectionRequestInput TEST_INPUT = new CreateTestConnectionRequestInput( - "{}", - "0.8.44" - ); + private static final CreateTestConnectionRequestInput TEST_INPUT = + new CreateTestConnectionRequestInput("{}", "0.8.44"); @Test public void testGetSuccess() throws Exception { @@ -27,7 +24,8 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -37,11 +35,11 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -50,7 +48,8 @@ public void testGetUnauthorized() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); IngestionConfiguration ingestionConfiguration = new IngestionConfiguration(); ingestionConfiguration.setDefaultCliVersion("default"); - CreateTestConnectionRequestResolver resolver = new CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); + CreateTestConnectionRequestResolver resolver = + new 
CreateTestConnectionRequestResolver(mockClient, ingestionConfiguration); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -59,9 +58,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java index 532b9b89f3a991..268f8b8927b674 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/GetIngestionExecutionRequestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -20,9 +23,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetIngestionExecutionRequestResolverTest { @Test @@ -33,32 +33,48 @@ public void testGetSuccess() throws Exception { ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); 
ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + 
Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -69,7 +85,8 @@ public void testGetSuccess() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -78,7 +95,9 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test @@ -87,13 +106,16 @@ public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); 
Mockito.doThrow(RemoteInvocationException.class) .when(mockClient) - .batchGetV2(Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); - GetIngestionExecutionRequestResolver resolver = new GetIngestionExecutionRequestResolver(mockClient); + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + GetIngestionExecutionRequestResolver resolver = + new GetIngestionExecutionRequestResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_EXECUTION_REQUEST_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java index 25f3ccbd47cd65..fdb150e6924417 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/IngestionSourceExecutionRequestsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,10 +31,6 @@ import 
org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class IngestionSourceExecutionRequestsResolverTest { @Test @@ -40,49 +39,65 @@ public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); // Mock filter response - Mockito.when(mockClient.filter( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.any(Filter.class), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(10), - Mockito.any(Authentication.class))) - .thenReturn(new SearchResult() - .setFrom(0) - .setPageSize(10) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableList.of( - new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN)))) - ); + Mockito.when( + mockClient.filter( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.any(Filter.class), + Mockito.any(SortCriterion.class), + Mockito.eq(0), + Mockito.eq(10), + Mockito.any(Authentication.class))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(10) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(TEST_EXECUTION_REQUEST_URN))))); // Mock batch get response ExecutionRequestInput returnedInput = getTestExecutionRequestInput(); ExecutionRequestResult returnedResult = getTestExecutionRequestResult(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), - Mockito.eq(ImmutableSet.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(TEST_EXECUTION_REQUEST_URN, - new EntityResponse().setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) - .setUrn(TEST_EXECUTION_REQUEST_URN) - .setAspects(new 
EnvelopedAspectMap(ImmutableMap.of( - Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInput.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))), - Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedResult.data())) - .setCreated(new AuditStamp() - .setTime(0L) - .setActor(Urn.createFromString("urn:li:corpuser:test"))) - ))))); - - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.EXECUTION_REQUEST_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_EXECUTION_REQUEST_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_EXECUTION_REQUEST_URN, + new EntityResponse() + .setEntityName(Constants.EXECUTION_REQUEST_ENTITY_NAME) + .setUrn(TEST_EXECUTION_REQUEST_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.EXECUTION_REQUEST_INPUT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInput.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test"))), + Constants.EXECUTION_REQUEST_RESULT_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedResult.data())) + .setCreated( + new AuditStamp() + .setTime(0L) + .setActor( + Urn.createFromString("urn:li:corpuser:test")))))))); + + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -99,14 +114,16 @@ public void testGetSuccess() throws Exception { assertEquals((int) executionRequests.getStart(), 0); assertEquals((int) 
executionRequests.getCount(), 10); assertEquals((int) executionRequests.getTotal(), 1); - verifyTestExecutionRequest(executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); + verifyTestExecutionRequest( + executionRequests.getExecutionRequests().get(0), returnedInput, returnedResult); } @Test public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - IngestionSourceExecutionRequestsResolver resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -119,29 +136,28 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getSource()).thenReturn(parentSource); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).list( - Mockito.any(), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .list( + Mockito.any(), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - IngestionSourceExecutionRequestsResolver 
resolver = new IngestionSourceExecutionRequestsResolver(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + IngestionSourceExecutionRequestsResolver resolver = + new IngestionSourceExecutionRequestsResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java index c7a72e475f7abd..bec141bddf2600 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/execution/RollbackIngestionResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.execution; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.RollbackIngestionInput; @@ -8,10 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class RollbackIngestionResolverTest { private static final String RUN_ID = "testRunId"; @@ -46,9 +45,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).rollbackIngestion( - Mockito.eq(RUN_ID), - 
Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); } @Test @@ -59,24 +57,22 @@ public void testRollbackIngestionMethod() throws Exception { QueryContext mockContext = getMockAllowContext(); resolver.rollbackIngestion(RUN_ID, mockContext).get(); - Mockito.verify(mockClient, Mockito.times(1)).rollbackIngestion( - Mockito.eq(RUN_ID), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .rollbackIngestion(Mockito.eq(RUN_ID), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).rollbackIngestion( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .rollbackIngestion(Mockito.any(), Mockito.any(Authentication.class)); RollbackIngestionResolver resolver = new RollbackIngestionResolver(mockClient); QueryContext mockContext = getMockAllowContext(); - assertThrows(RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); + assertThrows( + RuntimeException.class, () -> resolver.rollbackIngestion(RUN_ID, mockContext).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java index 2d64d4ec56ba18..85ef304d285338 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverMatcherTest.java @@ -6,7 +6,6 @@ import com.linkedin.secret.DataHubSecretValue; import 
org.mockito.ArgumentMatcher; - public class CreateSecretResolverMatcherTest implements ArgumentMatcher { private MetadataChangeProposal left; @@ -24,17 +23,13 @@ public boolean matches(MetadataChangeProposal right) { } private boolean secretPropertiesMatch(GenericAspect left, GenericAspect right) { - DataHubSecretValue leftProps = GenericRecordUtils.deserializeAspect( - left.getValue(), - "application/json", - DataHubSecretValue.class - ); - - DataHubSecretValue rightProps = GenericRecordUtils.deserializeAspect( - right.getValue(), - "application/json", - DataHubSecretValue.class - ); + DataHubSecretValue leftProps = + GenericRecordUtils.deserializeAspect( + left.getValue(), "application/json", DataHubSecretValue.class); + + DataHubSecretValue rightProps = + GenericRecordUtils.deserializeAspect( + right.getValue(), "application/json", DataHubSecretValue.class); // Omit timestamp comparison. return leftProps.getName().equals(rightProps.getName()) diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java index 18ae71661318eb..eafdfde364947c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/CreateSecretResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; @@ -20,24 +22,18 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public 
class CreateSecretResolverTest { - private static final CreateSecretInput TEST_INPUT = new CreateSecretInput( - "MY_SECRET", - "mysecretvalue", - "none" - ); + private static final CreateSecretInput TEST_INPUT = + new CreateSecretInput("MY_SECRET", "mysecretvalue", "none"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))).thenReturn("encryptedvalue"); + Mockito.when(mockSecretService.encrypt(Mockito.eq(TEST_INPUT.getValue()))) + .thenReturn("encryptedvalue"); CreateSecretResolver resolver = new CreateSecretResolver(mockClient, mockSecretService); // Execute resolver @@ -57,18 +53,21 @@ public void testGetSuccess() throws Exception { value.setValue("encryptedvalue"); value.setName(TEST_INPUT.getName()); value.setDescription(TEST_INPUT.getDescription()); - value.setCreated(new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.argThat(new CreateSecretResolverMatcherTest(new MetadataChangeProposal() - .setChangeType(ChangeType.UPSERT) - .setEntityType(Constants.SECRETS_ENTITY_NAME) - .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) - .setAspect(GenericRecordUtils.serializeAspect(value)) - .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + value.setCreated( + new AuditStamp().setActor(UrnUtils.getUrn("urn:li:corpuser:test")).setTime(0L)); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.argThat( + new CreateSecretResolverMatcherTest( + new MetadataChangeProposal() + .setChangeType(ChangeType.UPSERT) + .setEntityType(Constants.SECRETS_ENTITY_NAME) + .setAspectName(Constants.SECRET_VALUE_ASPECT_NAME) + 
.setAspect(GenericRecordUtils.serializeAspect(value)) + .setEntityKeyAspect(GenericRecordUtils.serializeAspect(key)))), + Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -80,23 +79,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver @@ -108,4 +105,3 @@ public void testGetEntityClientException() throws Exception { assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); } } - diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java index 679425afbf2e7f..7cfe33feb58fc2 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/DeleteSecretResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -8,9 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteSecretResolverTest { @Test @@ -26,7 +26,8 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_SECRET_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); } @Test @@ -42,14 +43,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(TEST_SECRET_URN, mockContext.getAuthentication()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class)); + 
Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_SECRET_URN), Mockito.any(Authentication.class)); DeleteSecretResolver resolver = new DeleteSecretResolver(mockClient); // Execute Resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java index 0042d34e602cc1..495adb27dbd5d3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/GetSecretValuesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -22,14 +25,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class GetSecretValuesResolverTest { - private static final GetSecretValuesInput TEST_INPUT = new GetSecretValuesInput( - ImmutableList.of(getTestSecretValue().getName()) - ); + private static final GetSecretValuesInput TEST_INPUT = + new GetSecretValuesInput(ImmutableList.of(getTestSecretValue().getName())); @Test public void testGetSuccess() throws Exception { @@ -39,27 +38,29 @@ public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); SecretService mockSecretService = Mockito.mock(SecretService.class); - 
Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))).thenReturn(decryptedSecretValue); + Mockito.when(mockSecretService.decrypt(Mockito.eq(getTestSecretValue().getValue()))) + .thenReturn(decryptedSecretValue); DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); @@ -90,22 +91,19 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), 
Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); SecretService mockSecretService = Mockito.mock(SecretService.class); GetSecretValuesResolver resolver = new GetSecretValuesResolver(mockClient, mockSecretService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java index ad91c214db28f2..7d89f4aafa01a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/secret/ListSecretsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.secret; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,15 +26,9 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListSecretsResolverTest { - private static final ListSecretsInput TEST_INPUT = new ListSecretsInput( - 0, 20, null - ); + private static final ListSecretsInput 
TEST_INPUT = new ListSecretsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { @@ -40,40 +37,43 @@ public void testGetSuccess() throws Exception { DataHubSecretValue returnedValue = getTestSecretValue(); - Mockito.when(mockClient.search( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.SECRETS_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), - Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_SECRET_URN, - new EntityResponse() - .setEntityName(Constants.SECRETS_ENTITY_NAME) - .setUrn(TEST_SECRET_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.SECRET_VALUE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedValue.data())) - ))) - ) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(SortCriterion.class), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_SECRET_URN))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.SECRETS_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_SECRET_URN))), + Mockito.eq(ImmutableSet.of(Constants.SECRET_VALUE_ASPECT_NAME)), + 
Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_SECRET_URN, + new EntityResponse() + .setEntityName(Constants.SECRETS_ENTITY_NAME) + .setUrn(TEST_SECRET_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.SECRET_VALUE_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedValue.data()))))))); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver @@ -99,36 +99,33 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(SortCriterion.class), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(SortCriterion.class), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListSecretsResolver resolver = new ListSecretsResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java index c898ae72807101..5172ef01c25ebd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/DeleteIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.entity.client.EntityClient; @@ -8,9 +11,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; - public class DeleteIngestionSourceResolverTest { @Test @@ -22,11 +22,13 @@ public void testGetSuccess() throws Exception { // execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + 
.thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertEquals(resolver.get(mockEnv).get(), TEST_INGESTION_SOURCE_URN.toString()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); } @Test @@ -38,24 +40,29 @@ public void testGetUnauthorized() throws Exception { // Execute resolver QueryContext mockContext = getMockDenyContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(TEST_INGESTION_SOURCE_URN, mockContext.getAuthentication()); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .deleteEntity(Mockito.eq(TEST_INGESTION_SOURCE_URN), Mockito.any(Authentication.class)); // Execute Resolver QueryContext mockContext = getMockAllowContext(); DeleteIngestionSourceResolver resolver = new DeleteIngestionSourceResolver(mockClient); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - 
Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java index ebafd1782e000d..bda18961d3890c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/GetIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.assertThrows; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -14,13 +17,9 @@ import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; import java.util.HashSet; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.assertThrows; - public class GetIngestionSourceResolverTest { @Test @@ -30,29 +29,31 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo returnedInfo = getTestIngestionSourceInfo(); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - 
Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())) - ))) - ) - ); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(returnedInfo.data()))))))); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); // Data Assertions @@ -72,28 +73,26 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + 
Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); GetIngestionSourceResolver resolver = new GetIngestionSourceResolver(mockClient); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java index 8e2453ce06a398..a86d67fcd15c18 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/ListIngestionSourceResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static org.testng.Assert.*; + import 
com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,13 +26,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static org.testng.Assert.*; - - public class ListIngestionSourceResolverTest { - private static final ListIngestionSourcesInput TEST_INPUT = new ListIngestionSourcesInput(0, 20, null, null); + private static final ListIngestionSourcesInput TEST_INPUT = + new ListIngestionSourcesInput(0, 20, null, null); @Test public void testGetSuccess() throws Exception { @@ -40,41 +40,47 @@ public void testGetSuccess() throws Exception { final DataHubIngestionSourceKey key = new DataHubIngestionSourceKey(); key.setId("test"); - Mockito.when(mockClient.search( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(""), - Mockito.any(), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN)))) - ); - - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), - Mockito.eq(ImmutableSet.of(Constants.INGESTION_INFO_ASPECT_NAME, Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), - Mockito.any(Authentication.class) - )).thenReturn( - ImmutableMap.of( - TEST_INGESTION_SOURCE_URN, - new EntityResponse() - .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) - .setUrn(TEST_INGESTION_SOURCE_URN) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.INGESTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), - Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, - 
new EnvelopedAspect().setValue(new Aspect(key.data())) - ))) - ) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(""), + Mockito.any(), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_INGESTION_SOURCE_URN))))); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.INGESTION_SOURCE_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(TEST_INGESTION_SOURCE_URN))), + Mockito.eq( + ImmutableSet.of( + Constants.INGESTION_INFO_ASPECT_NAME, + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + TEST_INGESTION_SOURCE_URN, + new EntityResponse() + .setEntityName(Constants.INGESTION_SOURCE_ENTITY_NAME) + .setUrn(TEST_INGESTION_SOURCE_URN) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.INGESTION_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(returnedInfo.data())), + Constants.INGESTION_SOURCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(key.data()))))))); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver @@ -88,7 +94,8 @@ public void testGetSuccess() throws Exception { assertEquals(resolver.get(mockEnv).get().getCount(), 1); assertEquals(resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getIngestionSources().size(), 1); - verifyTestIngestionSourceGraphQL(resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); + verifyTestIngestionSourceGraphQL( + resolver.get(mockEnv).get().getIngestionSources().get(0), returnedInfo); } @Test @@ -100,35 +107,32 @@ public void testGetUnauthorized() throws Exception { // 
Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListIngestionSourcesResolver resolver = new ListIngestionSourcesResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java index 16d8da9169a8fc..8213a5fb61a55b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ingest/source/UpsertIngestionSourceResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ingest.source; +import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateIngestionSourceConfigInput; @@ -15,19 +19,16 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.resolvers.ingest.IngestTestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class UpsertIngestionSourceResolverTest { - private static final UpdateIngestionSourceInput TEST_INPUT = new UpdateIngestionSourceInput( - "Test source", - "mysql", "Test source description", - new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), - new UpdateIngestionSourceConfigInput("my test recipe", "0.8.18", "executor id", false, null) - ); + private static final UpdateIngestionSourceInput TEST_INPUT = + new UpdateIngestionSourceInput( + "Test source", + "mysql", + "Test source description", + new UpdateIngestionSourceScheduleInput("* * * * *", "UTC"), + new UpdateIngestionSourceConfigInput( + "my test recipe", "0.8.18", "executor id", false, null)); @Test public void testGetSuccess() throws Exception { @@ -38,7 +39,8 @@ public void testGetSuccess() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); 
DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))).thenReturn(TEST_INGESTION_SOURCE_URN.toString()); + Mockito.when(mockEnv.getArgument(Mockito.eq("urn"))) + .thenReturn(TEST_INGESTION_SOURCE_URN.toString()); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -48,24 +50,24 @@ public void testGetSuccess() throws Exception { DataHubIngestionSourceInfo info = new DataHubIngestionSourceInfo(); info.setType(TEST_INPUT.getType()); info.setName(TEST_INPUT.getName()); - info.setSchedule(new DataHubIngestionSourceSchedule() - .setInterval(TEST_INPUT.getSchedule().getInterval()) - .setTimezone(TEST_INPUT.getSchedule().getTimezone()) - ); - info.setConfig(new DataHubIngestionSourceConfig() - .setRecipe(TEST_INPUT.getConfig().getRecipe()) - .setVersion(TEST_INPUT.getConfig().getVersion()) - .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) - .setDebugMode(TEST_INPUT.getConfig().getDebugMode()) - ); - - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(MutationUtils.buildMetadataChangeProposalWithUrn(TEST_INGESTION_SOURCE_URN, - INGESTION_INFO_ASPECT_NAME, info) - ), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + info.setSchedule( + new DataHubIngestionSourceSchedule() + .setInterval(TEST_INPUT.getSchedule().getInterval()) + .setTimezone(TEST_INPUT.getSchedule().getTimezone())); + info.setConfig( + new DataHubIngestionSourceConfig() + .setRecipe(TEST_INPUT.getConfig().getRecipe()) + .setVersion(TEST_INPUT.getConfig().getVersion()) + .setExecutorId(TEST_INPUT.getConfig().getExecutorId()) + .setDebugMode(TEST_INPUT.getConfig().getDebugMode())); + + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq( + MutationUtils.buildMetadataChangeProposalWithUrn( + TEST_INGESTION_SOURCE_URN, INGESTION_INFO_ASPECT_NAME, info)), + 
Mockito.any(Authentication.class), + Mockito.eq(false)); } @Test @@ -77,24 +79,21 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); UpsertIngestionSourceResolver resolver = new UpsertIngestionSourceResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 61dd6c678e6e0f..8fc5ab6ebb8287 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; 
+import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -14,6 +17,7 @@ import com.linkedin.datahub.graphql.generated.DatasetUpdateInput; import com.linkedin.datahub.graphql.types.BatchMutableType; import com.linkedin.datahub.graphql.types.dataset.DatasetType; +import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; @@ -22,153 +26,158 @@ import com.linkedin.metadata.Constants; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; -import org.mockito.ArgumentCaptor; -import org.mockito.Mockito; -import org.testng.annotations.Test; -import com.linkedin.entity.Aspect; - import java.net.URISyntaxException; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.concurrent.CompletionException; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; +import org.mockito.ArgumentCaptor; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class MutableTypeBatchResolverTest { - private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1"; - private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2"; - private static final boolean TEST_DATASET_1_IS_DEPRECATED = true; - private static final boolean TEST_DATASET_2_IS_DEPRECATED = false; - private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note"; - private static final String TEST_DATASET_2_DEPRECATION_NOTE = ""; - private static final Deprecation TEST_DATASET_1_DEPRECATION; - - static { - try { - TEST_DATASET_1_DEPRECATION = new Deprecation() - .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) - .setNote(TEST_DATASET_1_DEPRECATION_NOTE) 
- .setActor(Urn.createFromString("urn:li:corpuser:datahub")); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + private static final String TEST_DATASET_1_URN = "urn:li:dataset:id-1"; + private static final String TEST_DATASET_2_URN = "urn:li:dataset:id-2"; + private static final boolean TEST_DATASET_1_IS_DEPRECATED = true; + private static final boolean TEST_DATASET_2_IS_DEPRECATED = false; + private static final String TEST_DATASET_1_DEPRECATION_NOTE = "Test Deprecation Note"; + private static final String TEST_DATASET_2_DEPRECATION_NOTE = ""; + private static final Deprecation TEST_DATASET_1_DEPRECATION; + + static { + try { + TEST_DATASET_1_DEPRECATION = + new Deprecation() + .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) + .setNote(TEST_DATASET_1_DEPRECATION_NOTE) + .setActor(Urn.createFromString("urn:li:corpuser:datahub")); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } - - private static final Deprecation TEST_DATASET_2_DEPRECATION; - - static { - try { - TEST_DATASET_2_DEPRECATION = new Deprecation() - .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) - .setNote(TEST_DATASET_2_DEPRECATION_NOTE) - .setActor(Urn.createFromString("urn:li:corpuser:datahub")); - } catch (URISyntaxException e) { - throw new RuntimeException(e); - } + } + + private static final Deprecation TEST_DATASET_2_DEPRECATION; + + static { + try { + TEST_DATASET_2_DEPRECATION = + new Deprecation() + .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) + .setNote(TEST_DATASET_2_DEPRECATION_NOTE) + .setActor(Urn.createFromString("urn:li:corpuser:datahub")); + } catch (URISyntaxException e) { + throw new RuntimeException(e); } + } - @Test - public void testGetSuccess() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType batchMutableType = new DatasetType(mockClient); + @Test + public void testGetSuccess() throws Exception { + EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + BatchMutableType 
batchMutableType = + new DatasetType(mockClient); - MutableTypeBatchResolver resolver = new MutableTypeBatchResolver<>(batchMutableType); + MutableTypeBatchResolver resolver = + new MutableTypeBatchResolver<>(batchMutableType); - List mockInputs = Arrays.asList( + List mockInputs = + Arrays.asList( new BatchDatasetUpdateInput.Builder() - .setUrn(TEST_DATASET_1_URN) - .setUpdate( - new DatasetUpdateInput.Builder() - .setDeprecation( - new DatasetDeprecationUpdate.Builder() - .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) - .setNote(TEST_DATASET_1_DEPRECATION_NOTE) - .build() - ) - .build() - ) - .build(), + .setUrn(TEST_DATASET_1_URN) + .setUpdate( + new DatasetUpdateInput.Builder() + .setDeprecation( + new DatasetDeprecationUpdate.Builder() + .setDeprecated(TEST_DATASET_1_IS_DEPRECATED) + .setNote(TEST_DATASET_1_DEPRECATION_NOTE) + .build()) + .build()) + .build(), new BatchDatasetUpdateInput.Builder() - .setUrn(TEST_DATASET_2_URN) - .setUpdate( - new DatasetUpdateInput.Builder() - .setDeprecation( - new DatasetDeprecationUpdate.Builder() - .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) - .setNote(TEST_DATASET_2_DEPRECATION_NOTE) - .build() - ) - .build() - ) - .build() - ); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - Authentication mockAuth = Mockito.mock(Authentication.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth); - Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); - - Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN); - Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN); - - Mockito.when(mockClient.batchGetV2(Mockito.eq(Constants.DATASET_ENTITY_NAME), + .setUrn(TEST_DATASET_2_URN) + .setUpdate( + new DatasetUpdateInput.Builder() + .setDeprecation( + new 
DatasetDeprecationUpdate.Builder() + .setDeprecated(TEST_DATASET_2_IS_DEPRECATED) + .setNote(TEST_DATASET_2_DEPRECATION_NOTE) + .build()) + .build()) + .build()); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + Mockito.when(mockEnv.getArgument("input")).thenReturn(mockInputs); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + Authentication mockAuth = Mockito.mock(Authentication.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(mockAuth); + Mockito.when(mockAuth.getActor()).thenReturn(new Actor(ActorType.USER, "datahub")); + + Urn datasetUrn1 = Urn.createFromString(TEST_DATASET_1_URN); + Urn datasetUrn2 = Urn.createFromString(TEST_DATASET_2_URN); + + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq(new HashSet<>(ImmutableSet.of(datasetUrn1, datasetUrn2))), Mockito.any(), Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - datasetUrn1, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data())) - ))), - datasetUrn2, - new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(datasetUrn2) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DATASET_DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data())) - ))) - )); - - List result = resolver.get(mockEnv).join(); - - ArgumentCaptor> changeProposalCaptor = ArgumentCaptor.forClass((Class) Collection.class); - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); - Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATASET_ENTITY_NAME), - 
Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), - // Dataset aspects to fetch are private, but aren't important for this test - Mockito.any(), - Mockito.any(Authentication.class) - ); - Collection changeProposals = changeProposalCaptor.getValue(); - - assertEquals(changeProposals.size(), 2); - assertEquals(result.size(), 2); - } - - @Test - public void testGetFailureUnauthorized() throws Exception { - EntityClient mockClient = Mockito.mock(RestliEntityClient.class); - BatchMutableType batchMutableType = new DatasetType(mockClient); - - MutableTypeBatchResolver resolver = new MutableTypeBatchResolver<>(batchMutableType); - - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - } + .thenReturn( + ImmutableMap.of( + datasetUrn1, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_1_DEPRECATION.data()))))), + datasetUrn2, + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(datasetUrn2) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATASET_DEPRECATION_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATASET_2_DEPRECATION.data()))))))); + + List result = resolver.get(mockEnv).join(); + + ArgumentCaptor> changeProposalCaptor = + ArgumentCaptor.forClass((Class) Collection.class); + Mockito.verify(mockClient, Mockito.times(1)) + .batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); + Mockito.verify(mockClient, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATASET_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), 
+ // Dataset aspects to fetch are private, but aren't important for this test + Mockito.any(), + Mockito.any(Authentication.class)); + Collection changeProposals = changeProposalCaptor.getValue(); + + assertEquals(changeProposals.size(), 2); + assertEquals(result.size(), 2); + } + + @Test + public void testGetFailureUnauthorized() throws Exception { + EntityClient mockClient = Mockito.mock(RestliEntityClient.class); + BatchMutableType batchMutableType = + new DatasetType(mockClient); + + MutableTypeBatchResolver resolver = + new MutableTypeBatchResolver<>(batchMutableType); + + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockDenyContext(); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java index 1adf7b1200574e..bdadfc98f6d850 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/SiblingsUtilsTest.java @@ -1,60 +1,69 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.common.Siblings; import com.linkedin.common.UrnArray; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.util.SiblingsUtils; import com.linkedin.metadata.entity.EntityService; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashSet; import java.util.Optional; - -import static 
com.linkedin.metadata.Constants.SIBLINGS_ASPECT_NAME; -import static org.testng.AssertJUnit.assertEquals; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class SiblingsUtilsTest { - private static final String TEST_DATASET_URN1 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; - private static final String TEST_DATASET_URN2 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; - private static final String TEST_DATASET_URN3 = "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; + private static final String TEST_DATASET_URN1 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created,PROD)"; + private static final String TEST_DATASET_URN2 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created2,PROD)"; + private static final String TEST_DATASET_URN3 = + "urn:li:dataset:(urn:li:dataPlatform:hive,fct_cypress_users_created3,PROD)"; @Test public void testGetSiblingUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings().setSiblings(siblingUrns) - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings().setSiblings(siblingUrns)); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), siblingUrns); } @Test public void testGetSiblingUrnsWithoutSiblings() { EntityService mockService = Mockito.mock(EntityService.class); - 
Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - new Siblings() - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(new Siblings()); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetSiblingUrnsWithSiblingsAspect() { EntityService mockService = Mockito.mock(EntityService.class); - Mockito.when(mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)).thenReturn( - null - ); + Mockito.when( + mockService.getLatestAspect(UrnUtils.getUrn(TEST_DATASET_URN1), SIBLINGS_ASPECT_NAME)) + .thenReturn(null); - assertEquals(SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), new UrnArray()); + assertEquals( + SiblingsUtils.getSiblingUrns(UrnUtils.getUrn(TEST_DATASET_URN1), mockService), + new UrnArray()); } @Test public void testGetNextSiblingUrn() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); Optional nextUrn = SiblingsUtils.getNextSiblingUrn(siblingUrns, new HashSet<>()); assertEquals(nextUrn, Optional.of(UrnUtils.getUrn(TEST_DATASET_URN2))); @@ -62,7 +71,8 @@ public void testGetNextSiblingUrn() { @Test public void testGetNextSiblingUrnWithUsedUrns() { - UrnArray siblingUrns = new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); + UrnArray siblingUrns = + new UrnArray(UrnUtils.getUrn(TEST_DATASET_URN2), UrnUtils.getUrn(TEST_DATASET_URN3)); HashSet usedUrns = new HashSet<>(); usedUrns.add(UrnUtils.getUrn(TEST_DATASET_URN2)); Optional nextUrn = 
SiblingsUtils.getNextSiblingUrn(siblingUrns, usedUrns); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index 9bd44e9ab09065..3fee28bc317257 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.mutate; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; + import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput; @@ -12,13 +15,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; - - public class UpdateUserSettingResolverTest { private static final String TEST_USER_URN = "urn:li:corpuser:test"; + @Test public void testWriteCorpUserSettings() throws Exception { EntityService mockService = getMockEntityService(); @@ -36,9 +36,12 @@ public void testWriteCorpUserSettings() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - CorpUserSettings newSettings = new CorpUserSettings().setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_USER_URN), - CORP_USER_SETTINGS_ASPECT_NAME, newSettings); + CorpUserSettings newSettings = + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)); + final 
MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_USER_URN), CORP_USER_SETTINGS_ASPECT_NAME, newSettings); verifySingleIngestProposal(mockService, 1, proposal); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java index e2661841fe8f70..abc1a5786f3638 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/operation/ReportOperationResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.operation; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.Operation; import com.linkedin.common.OperationSourceType; @@ -16,37 +20,35 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class ReportOperationResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Operation expectedOperation = new Operation() - .setTimestampMillis(0L) - .setLastUpdatedTimestamp(0L) - .setOperationType(OperationType.INSERT) - .setSourceType(OperationSourceType.DATA_PLATFORM) - .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) - 
.setCustomOperationType(null, SetMode.IGNORE_NULL) - .setNumAffectedRows(1L); + Operation expectedOperation = + new Operation() + .setTimestampMillis(0L) + .setLastUpdatedTimestamp(0L) + .setOperationType(OperationType.INSERT) + .setSourceType(OperationSourceType.DATA_PLATFORM) + .setActor(UrnUtils.getUrn("urn:li:corpuser:test")) + .setCustomOperationType(null, SetMode.IGNORE_NULL) + .setNumAffectedRows(1L); - MetadataChangeProposal expectedProposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - OPERATION_ASPECT_NAME, expectedOperation); + MetadataChangeProposal expectedProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), OPERATION_ASPECT_NAME, expectedOperation); // Test setting the domain - Mockito.when(mockClient.ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class))) - .thenReturn(TEST_ENTITY_URN); + Mockito.when( + mockClient.ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class))) + .thenReturn(TEST_ENTITY_URN); ReportOperationResolver resolver = new ReportOperationResolver(mockClient); @@ -57,11 +59,9 @@ public void testGetSuccess() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(expectedProposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal( + Mockito.eq(expectedProposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -77,9 +77,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + 
.ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private ReportOperationInput getTestInput() { @@ -91,4 +90,4 @@ private ReportOperationInput getTestInput() { input.setSourceType(com.linkedin.datahub.graphql.generated.OperationSourceType.DATA_PLATFORM); return input; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 329d71ec125db0..74f88f95fc171e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -24,13 +27,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddOwnersResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_OWNER_1_URN = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_2_URN = "urn:li:corpuser:test-id-2"; private static final String TEST_OWNER_3_URN = "urn:li:corpGroup:test-id-3"; @@ -39,18 +39,23 @@ public class AddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -58,12 +63,20 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), - new OwnerInput(TEST_OWNER_2_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) - ), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())), + new OwnerInput( + TEST_OWNER_2_URN, + OwnerEntityType.CORP_USER, + 
OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -71,38 +84,45 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); } @Test public void testGetSuccessExistingOwnerNewType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -110,13 +130,16 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -124,34 +147,42 @@ public void testGetSuccessExistingOwnerNewType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, 
Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.TECHNICAL_OWNER) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -159,12 +190,16 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { QueryContext mockContext = 
getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -172,39 +207,51 @@ public void testGetSuccessDeprecatedTypeToOwnershipType() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); } @Test public void testGetSuccessMultipleOwnerTypes() throws Exception { EntityService mockService = getMockEntityService(); - com.linkedin.common.Ownership oldOwnership = new Ownership().setOwners(new OwnerArray( - ImmutableList.of(new Owner() - .setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) - .setType(com.linkedin.common.OwnershipType.NONE) - .setSource(new OwnershipSource().setType(OwnershipSourceType.MANUAL)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(oldOwnership); + com.linkedin.common.Ownership oldOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + 
.setOwner(UrnUtils.getUrn(TEST_OWNER_1_URN)) + .setType(com.linkedin.common.OwnershipType.NONE) + .setSource( + new OwnershipSource().setType(OwnershipSourceType.MANUAL))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(oldOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_1_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_2_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_3_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) - .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) - .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) + .thenReturn(true); + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) + .thenReturn(true); AddOwnersResolver resolver = new AddOwnersResolver(mockService); @@ -212,22 +259,28 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of(OwnerInput.builder() + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + OwnerInput.builder() 
.setOwnerUrn(TEST_OWNER_1_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_2_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.BUSINESS_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_USER) .build(), - OwnerInput.builder() + OwnerInput.builder() .setOwnerUrn(TEST_OWNER_3_URN) - .setOwnershipTypeUrn(OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) + .setOwnershipTypeUrn( + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name())) .setOwnerEntityType(OwnerEntityType.CORP_GROUP) - .build() - ), TEST_ENTITY_URN); + .build()), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -235,27 +288,25 @@ public void testGetSuccessMultipleOwnerTypes() throws Exception { // Unable to easily validate exact payload due to the injected timestamp verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_2_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + 
.exists(Mockito.eq(Urn.createFromString(TEST_OWNER_3_URN))); } @Test public void testGetFailureOwnerDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -266,9 +317,15 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -280,10 +337,11 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + 
Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -294,9 +352,15 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -312,9 +376,15 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -327,21 +397,30 @@ public void testGetUnauthorized() throws Exception { public void 
testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddOwnersInput input = new AddOwnersInput(ImmutableList.of( - new OwnerInput(TEST_OWNER_1_URN, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), TEST_ENTITY_URN); + AddOwnersInput input = + new AddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_1_URN, + OwnerEntityType.CORP_USER, + OwnershipType.TECHNICAL_OWNER, + OwnerUtils.mapOwnershipTypeToEntity(OwnershipType.TECHNICAL_OWNER.name()))), + TEST_ENTITY_URN); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 79fc62742f4442..92a789530d6e4f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -1,5 +1,8 @@ package 
com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -23,14 +26,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @@ -38,16 +39,18 @@ public class BatchAddOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -56,8 +59,12 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -65,52 +72,64 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + 
OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); } @Test public void testGetSuccessExistingOwners() throws Exception { - final Ownership originalOwnership = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); + final Ownership originalOwnership = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + .setType(OwnershipType.TECHNICAL_OWNER)))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalOwnership); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -119,12 +138,20 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_OWNER_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.TECHNICAL_OWNER + .name())))) .thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString( - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())))) + Mockito.when( + mockService.exists( + Urn.createFromString( + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())))) .thenReturn(true); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); @@ -132,44 +159,49 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of( - new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, 
- com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_OWNER_URN_2))); } @Test public 
void testGetFailureOwnerDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -180,20 +212,27 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( 
+ com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -205,15 +244,17 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -225,20 +266,27 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - 
com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -254,20 +302,27 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new 
ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -280,32 +335,42 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddOwnersInput input = new BatchAddOwnersInput(ImmutableList.of(new OwnerInput( - TEST_OWNER_URN_1, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - 
OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name())), - new OwnerInput( - TEST_OWNER_URN_2, - OwnerEntityType.CORP_USER, - com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, - OwnerUtils.mapOwnershipTypeToEntity(com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER.name()))), - null, - ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddOwnersInput input = + new BatchAddOwnersInput( + ImmutableList.of( + new OwnerInput( + TEST_OWNER_URN_1, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name())), + new OwnerInput( + TEST_OWNER_URN_2, + OwnerEntityType.CORP_USER, + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER, + OwnerUtils.mapOwnershipTypeToEntity( + com.linkedin.datahub.graphql.generated.OwnershipType.BUSINESS_OWNER + .name()))), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index 9dc2ec81278069..7cef90ffee5121 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.owner; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.Owner; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveOwnersResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_OWNER_URN_1 = "urn:li:corpuser:test-id-1"; private static final String TEST_OWNER_URN_2 = "urn:li:corpuser:test-id-2"; @@ -35,15 +36,17 @@ public class BatchRemoveOwnersResolverTest { public void testGetSuccessNoExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -57,12 +60,13 @@ public void testGetSuccessNoExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of( - TEST_OWNER_URN_1, - TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -74,24 +78,36 @@ public void testGetSuccessNoExistingOwners() throws Exception { public void testGetSuccessExistingOwners() throws Exception { EntityService mockService = getMockEntityService(); - final Ownership oldOwners1 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_1)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners1 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_1)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + 
Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldOwners1); - final Ownership oldOwners2 = new Ownership().setOwners(new OwnerArray(ImmutableList.of( - new Owner().setOwner(Urn.createFromString(TEST_OWNER_URN_2)).setType(OwnershipType.TECHNICAL_OWNER) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + final Ownership oldOwners2 = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setOwner(Urn.createFromString(TEST_OWNER_URN_2)) + .setType(OwnershipType.TECHNICAL_OWNER)))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldOwners2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -105,10 +121,13 @@ public void testGetSuccessExistingOwners() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -120,15 +139,17 @@ public void testGetSuccessExistingOwners() throws Exception { public void testGetFailureResourceDoesNotExist() throws 
Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.OWNERSHIP_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -140,10 +161,13 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -159,10 +183,13 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, 
ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -175,22 +202,28 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveOwnersInput input = new BatchRemoveOwnersInput(ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2 - ), null, ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveOwnersInput input = + new BatchRemoveOwnersInput( + ImmutableList.of(TEST_OWNER_URN_1, TEST_OWNER_URN_2), + null, + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java index 0643ead444c948..ff11d971b52e88 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/CreateOwnershipTypeResolverTest.java @@ -1,12 +1,15 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateOwnershipTypeInput; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.service.OwnershipTypeService; @@ -15,15 +18,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateOwnershipTypeResolverTest { - private static final CreateOwnershipTypeInput TEST_INPUT = new CreateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final CreateOwnershipTypeInput TEST_INPUT = + new CreateOwnershipTypeInput( + "Custom ownership", "A custom ownership 
description for testing purposes"); private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); @@ -45,10 +44,12 @@ public void testCreateSuccess() throws Exception { assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); assertEquals(ownershipType.getType(), EntityType.CUSTOM_OWNERSHIP_TYPE); - Mockito.verify(mockService, Mockito.times(1)).createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createOwnershipType( + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -65,20 +66,18 @@ public void testCreateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testCreateOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createOwnershipType( - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createOwnershipType( + Mockito.any(), Mockito.any(), Mockito.any(Authentication.class), Mockito.anyLong()); CreateOwnershipTypeResolver resolver = new CreateOwnershipTypeResolver(mockService); @@ -93,12 +92,13 @@ public void testCreateOwnershipTypeServiceException() throws Exception { private OwnershipTypeService 
initMockService() { OwnershipTypeService service = Mockito.mock(OwnershipTypeService.class); - Mockito.when(service.createOwnershipType( - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_OWNERSHIP_TYPE_URN); + Mockito.when( + service.createOwnershipType( + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_OWNERSHIP_TYPE_URN); return service; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java index 9f526e40082366..ae97164a2787e9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/DeleteOwnershipTypeResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.ArgumentMatchers.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -14,11 +18,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.ArgumentMatchers.*; -import static org.testng.Assert.*; - - public class DeleteOwnershipTypeResolverTest { private static final Urn TEST_URN = @@ -41,11 +40,8 @@ public void testGetSuccessOwnershipTypeCanManage() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteOwnershipType( - Mockito.eq(TEST_URN), - 
anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class)); } @Test @@ -62,21 +58,17 @@ public void testGetFailureOwnershipTypeCanNotManager() throws Exception { assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteOwnershipType( - Mockito.eq(TEST_URN), - anyBoolean(), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteOwnershipType(Mockito.eq(TEST_URN), anyBoolean(), Mockito.any(Authentication.class)); } @Test public void testGetOwnershipTypeServiceException() throws Exception { // Create resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteOwnershipType( - Mockito.any(), - anyBoolean(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteOwnershipType(Mockito.any(), anyBoolean(), Mockito.any(Authentication.class)); DeleteOwnershipTypeResolver resolver = new DeleteOwnershipTypeResolver(mockService); @@ -93,17 +85,18 @@ public void testGetOwnershipTypeServiceException() throws Exception { private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new 
AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getOwnershipTypeInfo( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java index ceab13167246c8..fd7baf6af74691 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/ListOwnershipTypesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,16 +21,13 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListOwnershipTypesResolverTest { private static final Urn TEST_OWNERSHIP_TYPE_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); - private static final ListOwnershipTypesInput TEST_INPUT = new ListOwnershipTypesInput(0, 20, "", null); + private static final ListOwnershipTypesInput TEST_INPUT = + new ListOwnershipTypesInput(0, 20, "", null); @Test public void testGetSuccess() throws Exception { @@ -38,21 +38,24 @@ public void testGetSuccess() throws Exception { final OwnershipTypeKey key = new OwnershipTypeKey(); key.setId("test"); - Mockito.when(mockClient.search( 
- Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(null), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.OWNERSHIP_TYPE_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(null), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_OWNERSHIP_TYPE_URN))))); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); @@ -78,35 +81,32 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(RuntimeException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .batchGetV2( + Mockito.any(), Mockito.anySet(), 
Mockito.anySet(), Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.any(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.any(), Mockito.anySet(), Mockito.anySet(), Mockito.any(Authentication.class)); ListOwnershipTypesResolver resolver = new ListOwnershipTypesResolver(mockClient); // Execute resolver @@ -124,4 +124,4 @@ public static OwnershipTypeInfo getOwnershipTypeInfo() { info.setDescription("some description"); return info; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java index f35b8f98cc1acf..6e428842201d53 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/ownership/UpdateOwnershipTypeResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.ownership; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.linkedin.common.AuditStamp; @@ -7,8 +10,8 @@ import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.AuthorizationException; -import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity; import com.linkedin.datahub.graphql.generated.UpdateOwnershipTypeInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; @@ -24,19 +27,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateOwnershipTypeResolverTest { private static final Urn TEST_URN = Urn.createFromTuple(Constants.OWNERSHIP_TYPE_ENTITY_NAME, "test"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateOwnershipTypeInput TEST_INPUT = new UpdateOwnershipTypeInput( - "Custom ownership", - "A custom ownership description for testing purposes"); + private static final UpdateOwnershipTypeInput TEST_INPUT = + new UpdateOwnershipTypeInput( + "Custom ownership", "A custom ownership description for testing purposes"); @Test public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { @@ -55,23 +54,27 @@ public void testUpdateSuccessOwnershipTypeCanManage() throws Exception { assertEquals(ownershipType.getInfo().getName(), TEST_INPUT.getName()); assertEquals(ownershipType.getInfo().getDescription(), TEST_INPUT.getDescription()); - Mockito.verify(mockService, Mockito.times(1)).updateOwnershipType( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateOwnershipType( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + 
Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testUpdateOwnershipTypeServiceException() throws Exception { // Update resolver OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateOwnershipType( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateOwnershipType( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateOwnershipTypeResolver resolver = new UpdateOwnershipTypeResolver(mockService); @@ -100,39 +103,41 @@ public void testUpdateUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(AuthorizationException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static OwnershipTypeService initOwnershipTypeService() { OwnershipTypeService mockService = Mockito.mock(OwnershipTypeService.class); - OwnershipTypeInfo testInfo = new OwnershipTypeInfo() - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) - 
.setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getOwnershipTypeInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + OwnershipTypeInfo testInfo = + new OwnershipTypeInfo() + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.OWNERSHIP_TYPE_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.OWNERSHIP_TYPE_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when( + mockService.getOwnershipTypeInfo( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getOwnershipTypeEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getOwnershipTypeEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java index b56d897a468ba8..2827e3602e3795 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static 
org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.Media; @@ -15,11 +19,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreatePostResolverTest { private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; private static final String POST_MEDIA_LOCATION = @@ -59,9 +58,12 @@ public void testCreatePost() throws Exception { UpdateMediaInput media = new UpdateMediaInput(); media.setType(POST_MEDIA_TYPE); media.setLocation(POST_MEDIA_LOCATION); - Media mediaObj = new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new Url(POST_MEDIA_LOCATION)); - when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))).thenReturn(mediaObj); + Media mediaObj = + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))) + .thenReturn(mediaObj); UpdatePostContentInput content = new UpdatePostContentInput(); content.setTitle(POST_TITLE); @@ -69,22 +71,33 @@ public void testCreatePost() throws Exception { content.setLink(POST_LINK); content.setContentType(POST_CONTENT_TYPE); content.setMedia(media); - com.linkedin.post.PostContent postContentObj = new com.linkedin.post.PostContent().setType( - com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) - .setLocation(new Url(POST_MEDIA_LOCATION))); - when(_postService.mapPostContent(eq(POST_CONTENT_TYPE.toString()), 
eq(POST_TITLE), eq(POST_DESCRIPTION), - eq(POST_LINK), any(Media.class))).thenReturn(postContentObj); + com.linkedin.post.PostContent postContentObj = + new com.linkedin.post.PostContent() + .setType(com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia( + new Media() + .setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(_postService.mapPostContent( + eq(POST_CONTENT_TYPE.toString()), + eq(POST_TITLE), + eq(POST_DESCRIPTION), + eq(POST_LINK), + any(Media.class))) + .thenReturn(postContentObj); CreatePostInput input = new CreatePostInput(); input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); input.setContent(content); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - when(_postService.createPost(eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj), - eq(_authentication))).thenReturn(true); + when(_postService.createPost( + eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), + eq(postContentObj), + eq(_authentication))) + .thenReturn(true); assertTrue(_resolver.get(_dataFetchingEnvironment).join()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java index b8a7488a824fd4..085cfd05697812 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/DeletePostResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import 
com.datahub.authentication.Authentication; import com.datahub.authentication.post.PostService; import com.linkedin.common.urn.Urn; @@ -9,11 +13,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class DeletePostResolverTest { private static final String POST_URN_STRING = "urn:li:post:123"; private PostService _postService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java index c22d6bf39640d4..6c475cdc7f5a85 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.post; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,17 +33,10 @@ import graphql.schema.DataFetchingEnvironment; import java.net.URISyntaxException; import java.util.Map; - import org.mockito.Mockito; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListPostsResolverTest { private static Map _entityResponseMap; private static final String POST_URN_STRING = "urn:li:post:examplePost"; @@ -49,12 +47,15 @@ public class 
ListPostsResolverTest { private static final String POST_TITLE = "title"; private static final String POST_DESCRIPTION = "description"; private static final String POST_LINK = "https://datahubproject.io"; - private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); - private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) - .setTitle(POST_TITLE) - .setDescription(POST_DESCRIPTION) - .setLink(new Url(POST_LINK)) - .setMedia(MEDIA); + private static final Media MEDIA = + new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = + new PostContent() + .setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; private EntityClient _entityClient; @@ -72,8 +73,11 @@ private Map getMockPostsEntityResponse() throws URISyntaxEx DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(postUrn.toString()); dataHubRoleInfo.setName(postUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return ImmutableMap.of(postUrn, entityResponse); } @@ -106,13 +110,27 @@ public void testListPosts() throws Exception { ListPostsInput input = new ListPostsInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(1); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + 
.setFrom(0) + .setPageSize(10) + .setNumEntities(1); roleSearchResult.setEntities( - new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); - - when(_entityClient.search(eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt(), - eq(_authentication), Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchEntityArray( + ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); + + when(_entityClient.search( + eq(POST_ENTITY_NAME), + any(), + eq(null), + any(), + anyInt(), + anyInt(), + eq(_authentication), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())) + .thenReturn(_entityResponseMap); ListPostsResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java index 9c04c67dd3a3b3..eebe0034fce612 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/CreateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -24,10 +26,10 @@ import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; +import 
com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.service.QueryService; -import com.linkedin.entity.client.EntityClient; import com.linkedin.query.QueryProperties; import com.linkedin.query.QuerySource; import com.linkedin.query.QueryStatement; @@ -40,21 +42,19 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class CreateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final CreateQueryInput TEST_INPUT = new CreateQueryInput( - new CreateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString())) - ); + private static final CreateQueryInput TEST_INPUT = + new CreateQueryInput( + new CreateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new CreateQuerySubjectInput(TEST_DATASET_URN.toString()))); @Test public void testGetSuccess() throws Exception { @@ -70,25 +70,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); 
assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + 
TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -105,23 +115,24 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateQueryResolver resolver = new CreateQueryResolver(mockService); @@ -136,58 +147,68 @@ public void testGetQueryServiceException() throws Exception { private QueryService initMockService() { QueryService service = Mockito.mock(QueryService.class); - Mockito.when(service.createQuery( - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(QuerySource.MANUAL), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN) - )), 
- Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_QUERY_URN); - - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects querySubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(querySubjects.data())) - ))) - ); + Mockito.when( + service.createQuery( + Mockito.eq(TEST_INPUT.getProperties().getName()), + Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq(QuerySource.MANUAL), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT + .getProperties() + .getStatement() + .getLanguage() + .toString()))), 
+ Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN))), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_QUERY_URN); + + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects querySubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(querySubjects.data())))))); return service; } @@ -197,36 +218,40 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - 
new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java index 78c894f27cbc3b..96ddc632562eea 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/DeleteQueryResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -22,14 +25,11 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteQueryResolverTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_ACTOR_URN = 
UrnUtils.getUrn("urn:li:corpuser:test"); @Test @@ -45,10 +45,8 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -62,10 +60,8 @@ public void testGetSuccessCanEditQueries() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test @@ -79,19 +75,17 @@ public void testGetFailureActorUnauthorized() { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).deleteQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteQuery(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Create resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteQuery( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteQuery(Mockito.any(), Mockito.any(Authentication.class)); DeleteQueryResolver resolver = new DeleteQueryResolver(mockService); @@ -108,14 +102,13 @@ private static QueryService initMockService() { QueryService mockService = Mockito.mock(QueryService.class); QuerySubjects existingQuerySubjects = new 
QuerySubjects(); - existingQuerySubjects.setSubjects(new QuerySubjectArray( - ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)) - )); + existingQuerySubjects.setSubjects( + new QuerySubjectArray(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(mockService.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) - .thenReturn(existingQuerySubjects); + Mockito.when( + mockService.getQuerySubjects( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn(existingQuerySubjects); return mockService; } @@ -126,40 +119,47 @@ private QueryContext getMockAllowEditQueriesOnQueryContext() { private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getActorUrn()).thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); + Mockito.when(mockContext.getActorUrn()) + .thenReturn(DeleteQueryResolverTest.TEST_ACTOR_URN.toString()); Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest = new AuthorizationRequest( - DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), - DeleteQueryResolverTest.TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); + AuthorizationRequest editQueriesRequest = + new AuthorizationRequest( + DeleteQueryResolverTest.TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec( + DeleteQueryResolverTest.TEST_DATASET_URN.getEntityType(), + 
DeleteQueryResolverTest.TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); AuthorizationResult editQueriesResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))).thenReturn(editQueriesResult); + Mockito.when(editQueriesResult.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest))) + .thenReturn(editQueriesResult); AuthorizationResult editAllResult = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest))).thenReturn(editAllResult); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java index 877a4d2b27f6ae..8a56b142e5b5ef 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/ListQueriesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -32,28 +35,24 @@ import org.testng.annotations.DataProvider; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListQueriesResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); private static final Urn TEST_QUERY_URN = Urn.createFromTuple("query", "test-id"); - private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = new 
ListQueriesInput( - 0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString() - ); - private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = new ListQueriesInput( - 0, 30, null, QuerySource.MANUAL, null - ); - private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = new ListQueriesInput( - 0, 40, null, null, TEST_DATASET_URN.toString() - ); + private static final ListQueriesInput TEST_INPUT_FULL_FILTERS = + new ListQueriesInput(0, 20, null, QuerySource.MANUAL, TEST_DATASET_URN.toString()); + private static final ListQueriesInput TEST_INPUT_SOURCE_FILTER = + new ListQueriesInput(0, 30, null, QuerySource.MANUAL, null); + private static final ListQueriesInput TEST_INPUT_ENTITY_FILTER = + new ListQueriesInput(0, 40, null, null, TEST_DATASET_URN.toString()); @DataProvider(name = "inputs") public static Object[][] inputs() { - return new Object[][] {{ TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER}}; + return new Object[][] { + {TEST_INPUT_FULL_FILTERS}, {TEST_INPUT_SOURCE_FILTER}, {TEST_INPUT_ENTITY_FILTER} + }; } @Test(dataProvider = "inputs") @@ -61,22 +60,30 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(input.getQuery() == null ? 
ListQueriesResolver.DEFAULT_QUERY : input.getQuery()), - Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), - Mockito.eq(new SortCriterion().setField(ListQueriesResolver.CREATED_AT_FIELD).setOrder(SortOrder.DESCENDING)), - Mockito.eq(input.getStart()), - Mockito.eq(input.getCount()), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq( + input.getQuery() == null + ? ListQueriesResolver.DEFAULT_QUERY + : input.getQuery()), + Mockito.eq(buildFilter(input.getSource(), input.getDatasetUrn())), + Mockito.eq( + new SortCriterion() + .setField(ListQueriesResolver.CREATED_AT_FIELD) + .setOrder(SortOrder.DESCENDING)), + Mockito.eq(input.getStart()), + Mockito.eq(input.getCount()), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_QUERY_URN))))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); @@ -90,7 +97,8 @@ public void testGetSuccess(final ListQueriesInput input) throws Exception { assertEquals((int) resolver.get(mockEnv).get().getCount(), 1); assertEquals((int) resolver.get(mockEnv).get().getTotal(), 1); assertEquals(resolver.get(mockEnv).get().getQueries().size(), 1); - assertEquals(resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); + assertEquals( + resolver.get(mockEnv).get().getQueries().get(0).getUrn(), TEST_QUERY_URN.toString()); } @Test @@ -102,33 +110,35 @@ public void testGetUnauthorized() throws 
Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT_FULL_FILTERS); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT_FULL_FILTERS); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq("*"), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq("*"), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true))); ListQueriesResolver resolver = new ListQueriesResolver(mockClient); // Execute resolver @@ -146,7 +156,8 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity if (source != null) { andConditions.add( - new 
FacetFilterInput(ListQueriesResolver.QUERY_SOURCE_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_SOURCE_FIELD, null, ImmutableList.of(source.toString()), false, @@ -154,14 +165,14 @@ private Filter buildFilter(@Nullable QuerySource source, @Nullable String entity } if (entityUrn != null) { andConditions.add( - new FacetFilterInput(ListQueriesResolver.QUERY_ENTITIES_FIELD, + new FacetFilterInput( + ListQueriesResolver.QUERY_ENTITIES_FIELD, null, ImmutableList.of(entityUrn), false, FilterOperator.EQUAL)); - } criteria.setAnd(andConditions); return ResolverUtils.buildFilter(Collections.emptyList(), ImmutableList.of(criteria)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java index 9b500b5fb39361..766d8a2ccb1367 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/query/UpdateQueryResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -13,12 +15,12 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateQueryInput; -import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; -import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.datahub.graphql.generated.QueryEntity; import com.linkedin.datahub.graphql.generated.QueryLanguage; import com.linkedin.datahub.graphql.generated.QueryStatementInput; +import 
com.linkedin.datahub.graphql.generated.UpdateQueryInput; +import com.linkedin.datahub.graphql.generated.UpdateQueryPropertiesInput; +import com.linkedin.datahub.graphql.generated.UpdateQuerySubjectInput; import com.linkedin.entity.Aspect; import com.linkedin.entity.AspectType; import com.linkedin.entity.EntityResponse; @@ -40,22 +42,21 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class UpdateQueryResolverTest { - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); - private static final Urn TEST_DATASET_URN_2 = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"); + private static final Urn TEST_DATASET_URN_2 = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"); private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:my-unique-query"); private static final Urn TEST_ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateQueryInput TEST_INPUT = new UpdateQueryInput( - new UpdateQueryPropertiesInput( - "test-id", - "test-description", - new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), - ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString())) - ); + private static final UpdateQueryInput TEST_INPUT = + new UpdateQueryInput( + new UpdateQueryPropertiesInput( + "test-id", + "test-description", + new QueryStatementInput("SELECT * FROM TABLE", QueryLanguage.SQL)), + ImmutableList.of(new UpdateQuerySubjectInput(TEST_DATASET_URN_2.toString()))); @Test public void testGetSuccess() throws Exception { @@ -72,25 +73,35 @@ public void testGetSuccess() throws Exception { QueryEntity query = resolver.get(mockEnv).get(); assertEquals(query.getProperties().getName(), 
TEST_INPUT.getProperties().getName()); - assertEquals(query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); + assertEquals( + query.getProperties().getDescription(), TEST_INPUT.getProperties().getDescription()); assertEquals(query.getProperties().getSource().toString(), QuerySource.MANUAL.toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_INPUT.getProperties().getStatement().getValue()); - assertEquals(query.getProperties().getStatement().getLanguage(), TEST_INPUT.getProperties().getStatement().getLanguage()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_INPUT.getSubjects().get(0).getDatasetUrn()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_INPUT.getProperties().getStatement().getValue()); + assertEquals( + query.getProperties().getStatement().getLanguage(), + TEST_INPUT.getProperties().getStatement().getLanguage()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_INPUT.getSubjects().get(0).getDatasetUrn()); assertEquals(query.getProperties().getCreated().getActor(), TEST_ACTOR_URN.toString()); assertEquals(query.getProperties().getLastModified().getActor(), TEST_ACTOR_URN.toString()); - Mockito.verify(mockService, Mockito.times(1)).updateQuery( - Mockito.eq(TEST_QUERY_URN), - Mockito.eq(TEST_INPUT.getProperties().getName()), - Mockito.eq(TEST_INPUT.getProperties().getDescription()), - Mockito.eq(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), - Mockito.eq(ImmutableList.of( - new QuerySubject().setEntity(TEST_DATASET_URN_2) - )), Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateQuery( + Mockito.eq(TEST_QUERY_URN), + Mockito.eq(TEST_INPUT.getProperties().getName()), + 
Mockito.eq(TEST_INPUT.getProperties().getDescription()), + Mockito.eq( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))), + Mockito.eq(ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -108,23 +119,24 @@ public void testGetUnauthorizedNoEditQueriesRights() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetQueryServiceException() throws Exception { // Update resolver QueryService mockService = Mockito.mock(QueryService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateQuery( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateQuery( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateQueryResolver resolver = new UpdateQueryResolver(mockService); @@ -143,56 +155,59 @@ private QueryService initMockService() { // Pre-Update QueryService service = Mockito.mock(QueryService.class); - final QuerySubjects existingSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ))); + final QuerySubjects existingSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + 
ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); - Mockito.when(service.getQuerySubjects( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class))) + Mockito.when( + service.getQuerySubjects(Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) .thenReturn(existingSubjects); // Post-Update - final QueryProperties queryProperties = new QueryProperties() - .setName(TEST_INPUT.getProperties().getName()) - .setDescription(TEST_INPUT.getProperties().getDescription()) - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setValue(TEST_INPUT.getProperties().getStatement().getValue()) - .setLanguage(com.linkedin.query.QueryLanguage.valueOf(TEST_INPUT.getProperties().getStatement().getLanguage().toString())) - ); - - final QuerySubjects newSubjects = new QuerySubjects() - .setSubjects(new QuerySubjectArray(ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN_2) - ))); - - Mockito.when(service.getQueryEntityResponse( - Mockito.eq(TEST_QUERY_URN), - Mockito.any(Authentication.class) - )).thenReturn(new EntityResponse() - .setUrn(TEST_QUERY_URN) - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(queryProperties.data())), - Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) - .setVersion(0L) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(newSubjects.data())) - ))) - ); + final QueryProperties queryProperties = + new QueryProperties() + .setName(TEST_INPUT.getProperties().getName()) + .setDescription(TEST_INPUT.getProperties().getDescription()) + .setCreated(new 
AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN)) + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setValue(TEST_INPUT.getProperties().getStatement().getValue()) + .setLanguage( + com.linkedin.query.QueryLanguage.valueOf( + TEST_INPUT.getProperties().getStatement().getLanguage().toString()))); + + final QuerySubjects newSubjects = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN_2)))); + + Mockito.when( + service.getQueryEntityResponse( + Mockito.eq(TEST_QUERY_URN), Mockito.any(Authentication.class))) + .thenReturn( + new EntityResponse() + .setUrn(TEST_QUERY_URN) + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.QUERY_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_PROPERTIES_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(queryProperties.data())), + Constants.QUERY_SUBJECTS_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.QUERY_SUBJECTS_ASPECT_NAME) + .setVersion(0L) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(newSubjects.data())))))); return service; } @@ -202,62 +217,71 @@ private QueryContext getMockQueryContext(boolean allowEditEntityQueries) { Authorizer mockAuthorizer = Mockito.mock(Authorizer.class); - AuthorizationRequest editQueriesRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest editAllRequest1 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN.getEntityType(), - TEST_DATASET_URN.toString())) - ); - - AuthorizationRequest 
editQueriesRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); - - AuthorizationRequest editAllRequest2 = new AuthorizationRequest( - TEST_ACTOR_URN.toString(), - PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), - Optional.of( - new EntitySpec( - TEST_DATASET_URN_2.getEntityType(), - TEST_DATASET_URN_2.toString())) - ); + AuthorizationRequest editQueriesRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editAllRequest1 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN.getEntityType(), TEST_DATASET_URN.toString()))); + + AuthorizationRequest editQueriesRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_QUERIES_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); + + AuthorizationRequest editAllRequest2 = + new AuthorizationRequest( + TEST_ACTOR_URN.toString(), + PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType(), + Optional.of( + new EntitySpec(TEST_DATASET_URN_2.getEntityType(), TEST_DATASET_URN_2.toString()))); AuthorizationResult editQueriesResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))).thenReturn(editQueriesResult1); + Mockito.when(editQueriesResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest1))) + .thenReturn(editQueriesResult1); AuthorizationResult editAllResult1 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult1.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult1.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest1))).thenReturn(editAllResult1); AuthorizationResult editQueriesResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editQueriesResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); - Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))).thenReturn(editQueriesResult2); + Mockito.when(editQueriesResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); + Mockito.when(mockAuthorizer.authorize(Mockito.eq(editQueriesRequest2))) + .thenReturn(editQueriesResult2); AuthorizationResult editAllResult2 = Mockito.mock(AuthorizationResult.class); - Mockito.when(editAllResult2.getType()).thenReturn(allowEditEntityQueries ? AuthorizationResult.Type.ALLOW : AuthorizationResult.Type.DENY); + Mockito.when(editAllResult2.getType()) + .thenReturn( + allowEditEntityQueries + ? 
AuthorizationResult.Type.ALLOW + : AuthorizationResult.Type.DENY); Mockito.when(mockAuthorizer.authorize(Mockito.eq(editAllRequest2))).thenReturn(editAllResult2); Mockito.when(mockContext.getAuthorizer()).thenReturn(mockAuthorizer); - Mockito.when(mockContext.getAuthentication()).thenReturn( - new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds") - ); + Mockito.when(mockContext.getAuthentication()) + .thenReturn(new Authentication(new Actor(ActorType.USER, TEST_ACTOR_URN.getId()), "creds")); return mockContext; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java index 3cde81d7a7f31d..fe032d0bf48590 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/AcceptRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class AcceptRoleResolverTest { private static final String INVITE_TOKEN_URN_STRING = "urn:li:inviteToken:admin-invite-token"; private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; @@ -54,7 +53,8 @@ public void testInvalidInviteToken() throws Exception { QueryContext mockContext = getMockAllowContext(); 
when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(false); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(false); AcceptRoleInput input = new AcceptRoleInput(); input.setInviteToken(INVITE_TOKEN_STRING); @@ -69,8 +69,10 @@ public void testNoRoleUrn() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(null); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(null); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); @@ -89,8 +91,10 @@ public void testAssignRolePasses() throws Exception { when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); when(_inviteTokenService.getInviteTokenUrn(eq(INVITE_TOKEN_STRING))).thenReturn(inviteTokenUrn); - when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))).thenReturn(true); - when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))).thenReturn(roleUrn); + when(_inviteTokenService.isInviteTokenValid(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(true); + 
when(_inviteTokenService.getInviteTokenRole(eq(inviteTokenUrn), eq(_authentication))) + .thenReturn(roleUrn); Actor actor = mock(Actor.class); when(_authentication.getActor()).thenReturn(actor); when(actor.toUrnStr()).thenReturn(ACTOR_URN_STRING); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java index 85891dbd96fb08..6411728552a1ec 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/BatchAssignRoleResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authorization.role.RoleService; import com.google.common.collect.ImmutableList; @@ -11,11 +15,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchAssignRoleResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String FIRST_ACTOR_URN_STRING = "urn:li:corpuser:foo"; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java index 8d8faf5c3f12eb..9197d1b18c0c9c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/CreateInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(true), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); CreateInviteTokenInput input = new CreateInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java 
index ef426979953d06..8e761454cb06c3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/GetInviteTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.invite.InviteTokenService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class GetInviteTokenResolverTest { private static final String ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String INVITE_TOKEN_STRING = "inviteToken"; @@ -43,12 +42,14 @@ public void testPasses() throws Exception { QueryContext mockContext = getMockAllowContext(); when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))).thenReturn(INVITE_TOKEN_STRING); + when(_inviteTokenService.getInviteToken(any(), eq(false), eq(_authentication))) + .thenReturn(INVITE_TOKEN_STRING); GetInviteTokenInput input = new GetInviteTokenInput(); input.setRoleUrn(ROLE_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); - assertEquals(_resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); + assertEquals( + _resolver.get(_dataFetchingEnvironment).join().getInviteToken(), INVITE_TOKEN_STRING); } } diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java index 4a0b062c67ffd2..d956295faa180f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/role/ListRolesResolverTest.java @@ -1,5 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.role; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.ArgumentMatchers.*; +import static org.mockito.Mockito.*; +import static org.mockito.Mockito.anyInt; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -24,14 +31,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.ArgumentMatchers.*; -import static org.mockito.Mockito.anyInt; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class ListRolesResolverTest { private static final String ADMIN_ROLE_URN_STRING = "urn:li:dataHubRole:Admin"; private static final String EDITOR_ROLE_URN_STRING = "urn:li:dataHubRole:Editor"; @@ -47,8 +46,11 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); dataHubRoleInfo.setDescription(roleUrn.toString()); dataHubRoleInfo.setName(roleUrn.toString()); - entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + entityResponse.setAspects( + new 
EnvelopedAspectMap( + ImmutableMap.of( + DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); return entityResponse; } @@ -57,8 +59,12 @@ private EntityResponse getMockRoleEntityResponse(Urn roleUrn) { public void setupTest() throws Exception { Urn adminRoleUrn = Urn.createFromString(ADMIN_ROLE_URN_STRING); Urn editorRoleUrn = Urn.createFromString(EDITOR_ROLE_URN_STRING); - _entityResponseMap = ImmutableMap.of(adminRoleUrn, getMockRoleEntityResponse(adminRoleUrn), editorRoleUrn, - getMockRoleEntityResponse(editorRoleUrn)); + _entityResponseMap = + ImmutableMap.of( + adminRoleUrn, + getMockRoleEntityResponse(adminRoleUrn), + editorRoleUrn, + getMockRoleEntityResponse(editorRoleUrn)); _entityClient = mock(EntityClient.class); _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); @@ -84,14 +90,28 @@ public void testListRoles() throws Exception { ListRolesInput input = new ListRolesInput(); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); final SearchResult roleSearchResult = - new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(2); - roleSearchResult.setEntities(new SearchEntityArray( - ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), - new SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); - - when(_entityClient.search(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), anyInt(), anyInt(), any(), - Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn(roleSearchResult); - when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + new SearchResult() + .setMetadata(new SearchResultMetadata()) + .setFrom(0) + .setPageSize(10) + .setNumEntities(2); + roleSearchResult.setEntities( + new SearchEntityArray( + ImmutableList.of( + new SearchEntity().setEntity(Urn.createFromString(ADMIN_ROLE_URN_STRING)), + new 
SearchEntity().setEntity(Urn.createFromString(EDITOR_ROLE_URN_STRING))))); + + when(_entityClient.search( + eq(DATAHUB_ROLE_ENTITY_NAME), + any(), + any(), + anyInt(), + anyInt(), + any(), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(DATAHUB_ROLE_ENTITY_NAME), any(), any(), any())) + .thenReturn(_entityResponseMap); ListRolesResult result = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(result.getStart(), 0); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java index c161a66d3ee933..c7d397c5a4a73a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AggregateAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -30,61 +33,49 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.schema.DataFetchingEnvironment; -import org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.Collections; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; - -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static 
com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class AggregateAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); List facets = ImmutableList.of("platform", "domains"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment 
mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -94,18 +85,16 @@ public static void testApplyViewNullBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", viewFilter, // Verify that view filter was used. 0, 0, facets // Verify called with facets we provide - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -113,42 +102,44 @@ public static void testApplyViewBaseFilter() throws Exception { Filter viewFilter = createFilter("field", "test"); DataHubViewInfo info = getViewInfo(viewFilter); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); Filter baseFilter = createFilter("baseField.keyword", "baseTest"); - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null - ); + EntityClient mockClient = + 
initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -158,18 +149,15 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 
0, 0, - null - ); + null); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -178,36 +166,28 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of("platform"); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - facets, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - null, - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + facets, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput(null, "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -218,18 +198,17 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - 
ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, facets // Verify facets passed in were used - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test @@ -238,36 +217,29 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { DataHubViewInfo info = getViewInfo(viewFilter); List facets = ImmutableList.of(); - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - facets, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + 
Collections.emptyList(), "", facets, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -278,55 +250,50 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, 0, null // Verify that an empty list for facets in input sends null - ); + ); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 0, - null, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 0, + null, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -334,45 +301,31 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 0, - null - ); + verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 0, 
null); } @Test public static void testErrorFetchingResults() throws Exception { - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final AggregateAcrossEntitiesResolver resolver = new AggregateAcrossEntitiesResolver(mockClient, mockService); - final AggregateAcrossEntitiesInput testInput = new AggregateAcrossEntitiesInput( - Collections.emptyList(), - "", - null, - null, - TEST_VIEW_URN.toString(), - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final AggregateAcrossEntitiesResolver resolver = + new AggregateAcrossEntitiesResolver(mockClient, mockService); + final AggregateAcrossEntitiesInput testInput = + new AggregateAcrossEntitiesInput( + Collections.emptyList(), "", null, null, TEST_VIEW_URN.toString(), null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -383,17 +336,18 @@ public static void testErrorFetchingResults() throws Exception { private static Filter createFilter(String field, String value) { return new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField(field) - .setValue(value) - .setCondition(Condition.EQUAL) - 
.setNegated(false) - .setValues(new StringArray(ImmutableList.of(value))) - )) - ))); + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField(field) + .setValue(value) + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues(new StringArray(ImmutableList.of(value)))))))); } private static DataHubViewInfo getViewInfo(Filter viewFilter) { @@ -402,24 +356,20 @@ private static DataHubViewInfo getViewInfo(Filter viewFilter) { info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); return info; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -430,22 +380,21 @@ private static EntityClient initMockEntityClient( int start, int limit, List facets, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - 
Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class), - Mockito.eq(facets) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class), + Mockito.eq(facets))) + .thenReturn(result); return client; } @@ -456,8 +405,8 @@ private static void verifyMockEntityClient( Filter filter, int start, int limit, - List facets - ) throws Exception { + List facets) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -468,21 +417,13 @@ private static void verifyMockEntityClient( Mockito.eq(null), Mockito.eq(null), Mockito.any(Authentication.class), - Mockito.eq(facets) - ); + Mockito.eq(facets)); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - .getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private AggregateAcrossEntitiesResolverTest() { } - + private AggregateAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java index 7397ea8fa21cfc..3b69337acfbd0e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/AutoCompleteForMultipleResolverTest.java @@ -1,5 +1,7 
@@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -32,14 +34,12 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - public class AutoCompleteForMultipleResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private AutoCompleteForMultipleResolverTest() { } + private AutoCompleteForMultipleResolverTest() {} public static void testAutoCompleteResolverSuccess( EntityClient mockClient, @@ -48,9 +48,10 @@ public static void testAutoCompleteResolverSuccess( EntityType entityType, SearchableEntityType entity, Urn viewUrn, - Filter filter - ) throws Exception { - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); + Filter filter) + throws Exception { + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver(ImmutableList.of(entity), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -65,13 +66,7 @@ public static void testAutoCompleteResolverSuccess( Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - entityName, - "test", - filter, - 10 - ); + verifyMockEntityClient(mockClient, entityName, "test", filter, 10); } // test our main entity types @@ -79,43 +74,64 @@ public static void testAutoCompleteResolverSuccess( public static void testAutoCompleteResolverSuccessForDifferentEntities() throws Exception { ViewService viewService = initMockViewService(null, null); // 
Daatasets - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATASET_ENTITY_NAME, EntityType.DATASET, new DatasetType(mockClient), null, null); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATASET_ENTITY_NAME, + EntityType.DATASET, + new DatasetType(mockClient), + null, + null); // Dashboards - mockClient = initMockEntityClient( - Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DASHBOARD_ENTITY_NAME, EntityType.DASHBOARD, new DashboardType(mockClient), null, null); + mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DASHBOARD_ENTITY_NAME, + EntityType.DASHBOARD, + new DashboardType(mockClient), + null, + null); - //DataFlows - mockClient = initMockEntityClient( - Constants.DATA_FLOW_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); - testAutoCompleteResolverSuccess(mockClient, viewService, Constants.DATA_FLOW_ENTITY_NAME, EntityType.DATA_FLOW, new 
DataFlowType(mockClient), null, null); + // DataFlows + mockClient = + initMockEntityClient( + Constants.DATA_FLOW_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); + testAutoCompleteResolverSuccess( + mockClient, + viewService, + Constants.DATA_FLOW_ENTITY_NAME, + EntityType.DATA_FLOW, + new DataFlowType(mockClient), + null, + null); } // test filters with a given view @@ -123,16 +139,16 @@ public static void testAutoCompleteResolverSuccessForDifferentEntities() throws public static void testAutoCompleteResolverWithViewFilter() throws Exception { DataHubViewInfo viewInfo = createViewInfo(new StringArray()); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - Constants.DATASET_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( + Constants.DATASET_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); testAutoCompleteResolverSuccess( mockClient, viewService, @@ -140,8 +156,7 @@ public static void testAutoCompleteResolverWithViewFilter() throws Exception { EntityType.DATASET, new DatasetType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); } // test entity type filters with a given view @@ -152,16 +167,16 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti entityNames.add(Constants.DASHBOARD_ENTITY_NAME); DataHubViewInfo viewInfo = createViewInfo(entityNames); ViewService viewService = initMockViewService(TEST_VIEW_URN, viewInfo); - EntityClient mockClient = initMockEntityClient( - 
Constants.DASHBOARD_ENTITY_NAME, - "test", - null, - 10, - new AutoCompleteResult() - .setQuery("test") - .setEntities(new AutoCompleteEntityArray()) - .setSuggestions(new StringArray()) - ); + EntityClient mockClient = + initMockEntityClient( + Constants.DASHBOARD_ENTITY_NAME, + "test", + null, + 10, + new AutoCompleteResult() + .setQuery("test") + .setEntities(new AutoCompleteEntityArray()) + .setSuggestions(new StringArray())); // ensure we do hit the entity client for dashboards since dashboards are in our view testAutoCompleteResolverSuccess( @@ -171,25 +186,26 @@ public static void testAutoCompleteResolverWithViewEntityFilter() throws Excepti EntityType.DASHBOARD, new DashboardType(mockClient), TEST_VIEW_URN, - viewInfo.getDefinition().getFilter() - ); + viewInfo.getDefinition().getFilter()); - // if the view has only dashboards, we should not make an auto-complete request on other entity types + // if the view has only dashboards, we should not make an auto-complete request on other entity + // types Mockito.verify(mockClient, Mockito.times(0)) .autoComplete( Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq("test"), Mockito.eq(viewInfo.getDefinition().getFilter()), Mockito.eq(10), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } @Test public static void testAutoCompleteResolverFailNoQuery() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); ViewService viewService = initMockViewService(null, null); - final AutoCompleteForMultipleResolver resolver = new AutoCompleteForMultipleResolver(ImmutableList.of(new DatasetType(mockClient)), viewService); + final AutoCompleteForMultipleResolver resolver = + new AutoCompleteForMultipleResolver( + ImmutableList.of(new DatasetType(mockClient)), viewService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -204,75 +220,60 @@ public static void 
testAutoCompleteResolverFailNoQuery() throws Exception { } private static EntityClient initMockEntityClient( - String entityName, - String query, - Filter filters, - int limit, - AutoCompleteResult result - ) throws Exception { + String entityName, String query, Filter filters, int limit, AutoCompleteResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.autoComplete( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filters), - Mockito.eq(limit), - Mockito.any(Authentication.class) - )).thenReturn(result); + Mockito.when( + client.autoComplete( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filters), + Mockito.eq(limit), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } - + private static void verifyMockEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filters, - int limit - ) throws Exception { + EntityClient mockClient, String entityName, String query, Filter filters, int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .autoComplete( Mockito.eq(entityName), Mockito.eq(query), Mockito.eq(filters), Mockito.eq(limit), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } private static DataHubViewInfo createViewInfo(StringArray entityNames) { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new 
CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(entityNames) - .setFilter(viewFilter) - ); + info.setDefinition( + new DataHubViewDefinition().setEntityTypes(entityNames).setFilter(viewFilter)); return info; } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java index a599117c3e165a..29a2b3081aefe3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/GetQuickFiltersResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -20,20 +23,16 @@ import com.linkedin.metadata.service.ViewService; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; -import 
org.mockito.Mockito; -import org.testng.Assert; -import org.testng.annotations.Test; - -import javax.annotation.Nonnull; -import javax.annotation.Nullable; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CompletionException; import java.util.stream.Collectors; import java.util.stream.IntStream; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.SEARCHABLE_ENTITY_TYPES; +import javax.annotation.Nonnull; +import javax.annotation.Nullable; +import org.mockito.Mockito; +import org.testng.Assert; +import org.testng.annotations.Test; public class GetQuickFiltersResolverTest { @@ -41,19 +40,21 @@ public class GetQuickFiltersResolverTest { public static void testGetQuickFiltersHappyPathSuccess() throws Exception { SearchResultMetadata mockData = getHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -72,19 +73,21 @@ public static void testGetQuickFiltersHappyPathSuccess() throws Exception { public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { SearchResultMetadata mockData = getUnHappyPathTestData(); ViewService mockService = Mockito.mock(ViewService.class); - EntityClient mockClient = 
initMockEntityClient( - SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()), - "*", - null, - 0, - 0, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(mockData) - ); + EntityClient mockClient = + initMockEntityClient( + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()), + "*", + null, + 0, + 0, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(mockData)); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -103,16 +106,17 @@ public static void testGetQuickFiltersUnhappyPathSuccess() throws Exception { public static void testGetQuickFiltersFailure() throws Exception { ViewService mockService = Mockito.mock(ViewService.class); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); final GetQuickFiltersResolver resolver = new GetQuickFiltersResolver(mockClient, mockService); @@ -124,26 +128,36 @@ public static void testGetQuickFiltersFailure() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static void compareResultToExpectedData(GetQuickFiltersResult result, GetQuickFiltersResult expected) { - IntStream.range(0, result.getQuickFilters().size()).forEach(index -> { - 
QuickFilter resultFilter = result.getQuickFilters().get(index); - QuickFilter expectedFilter = expected.getQuickFilters().get(index); - Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); - Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); - if (resultFilter.getEntity() != null) { - Assert.assertEquals(resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); - } - }); + private static void compareResultToExpectedData( + GetQuickFiltersResult result, GetQuickFiltersResult expected) { + IntStream.range(0, result.getQuickFilters().size()) + .forEach( + index -> { + QuickFilter resultFilter = result.getQuickFilters().get(index); + QuickFilter expectedFilter = expected.getQuickFilters().get(index); + Assert.assertEquals(resultFilter.getField(), expectedFilter.getField()); + Assert.assertEquals(resultFilter.getValue(), expectedFilter.getValue()); + if (resultFilter.getEntity() != null) { + Assert.assertEquals( + resultFilter.getEntity().getUrn(), expectedFilter.getEntity().getUrn()); + } + }); } private static SearchResultMetadata getHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 100, "urn:li:dataPlatform:snowflake")); + 
platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 99, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 98, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:bigquery", 97, "urn:li:dataPlatform:bigquery")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:test", 1, "urn:li:dataPlatform:test")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:custom", 96, "urn:li:dataPlatform:custom")); FilterValueArray entityTypeFilters = new FilterValueArray(); entityTypeFilters.add(createFilterValue("dataset", 100, null)); @@ -168,11 +182,18 @@ private static GetQuickFiltersResult getHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List quickFilters = new ArrayList<>(); // platforms should be in alphabetical order - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:bigquery", "urn:li:dataPlatform:bigquery")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:custom", "urn:li:dataPlatform:custom")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", 
"urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DASHBOARD", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_FLOW", null)); @@ -186,9 +207,12 @@ private static GetQuickFiltersResult getHappyPathResultData() { private static SearchResultMetadata getUnHappyPathTestData() { FilterValueArray platformFilterValues = new FilterValueArray(); // only 3 platforms available - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); - platformFilterValues.add(createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:snowflake", 98, "urn:li:dataPlatform:snowflake")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:looker", 100, "urn:li:dataPlatform:looker")); + platformFilterValues.add( + createFilterValue("urn:li:dataPlatform:dbt", 99, "urn:li:dataPlatform:dbt")); FilterValueArray entityTypeFilters = new FilterValueArray(); // no dashboard, data flows, or glossary terms @@ -210,10 +234,15 @@ private static SearchResultMetadata getUnHappyPathTestData() { private static GetQuickFiltersResult getUnHappyPathResultData() { GetQuickFiltersResult result = new GetQuickFiltersResult(); List quickFilters = new ArrayList<>(); - // in correct order by count for platforms (alphabetical). 
In correct order by priority for entity types - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); - quickFilters.add(createQuickFilter("platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); + // in correct order by count for platforms (alphabetical). In correct order by priority for + // entity types + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:dbt", "urn:li:dataPlatform:dbt")); + quickFilters.add( + createQuickFilter("platform", "urn:li:dataPlatform:looker", "urn:li:dataPlatform:looker")); + quickFilters.add( + createQuickFilter( + "platform", "urn:li:dataPlatform:snowflake", "urn:li:dataPlatform:snowflake")); quickFilters.add(createQuickFilter("_entityType", "DATASET", null)); quickFilters.add(createQuickFilter("_entityType", "DATA_JOB", null)); quickFilters.add(createQuickFilter("_entityType", "CHART", null)); @@ -224,7 +253,8 @@ private static GetQuickFiltersResult getUnHappyPathResultData() { return result; } - private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { + private static QuickFilter createQuickFilter( + @Nonnull final String field, @Nonnull final String value, @Nullable final String entityUrn) { QuickFilter quickFilter = new QuickFilter(); quickFilter.setField(field); quickFilter.setValue(value); @@ -234,7 +264,8 @@ private static QuickFilter createQuickFilter(@Nonnull final String field, @Nonnu return quickFilter; } - private static FilterValue createFilterValue(@Nonnull final String value, final int count, @Nullable final String entity) { + private static FilterValue createFilterValue( + @Nonnull final String value, final int count, @Nullable final String entity) { FilterValue filterValue = new FilterValue(); filterValue.setValue(value); 
filterValue.setFacetCount(count); @@ -244,7 +275,8 @@ private static FilterValue createFilterValue(@Nonnull final String value, final return filterValue; } - private static AggregationMetadata createAggregationMetadata(@Nonnull final String name, @Nonnull final FilterValueArray filterValues) { + private static AggregationMetadata createAggregationMetadata( + @Nonnull final String name, @Nonnull final FilterValueArray filterValues) { AggregationMetadata aggregationMetadata = new AggregationMetadata(); aggregationMetadata.setName(name); aggregationMetadata.setFilterValues(filterValues); @@ -257,24 +289,22 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } - private GetQuickFiltersResolverTest() { } - + private GetQuickFiltersResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java index b0a681c9b23423..d0bbfd126b9b96 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -38,167 +41,172 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.datahub.graphql.resolvers.search.SearchUtils.*; - - public class SearchAcrossEntitiesResolverTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - @Test public static void testApplyViewNullBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - 
ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockEntityClient( + mockClient, + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", - viewFilter, + viewFilter, // Verify that view filter was used. 
0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockEntityClient( - mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. - "", - viewFilter, // Verify that view filter was used. - 0, - 10 - ); - - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + 10); + + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewBaseFilter() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new 
AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("baseField.keyword") - .setValue("baseTest") - .setCondition(Condition.EQUAL) - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("baseTest"))) - )) - ))); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME), - "", - SearchUtils.combineFilters(baseFilter, viewFilter), - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - ImmutableList.of(EntityType.DATASET), - "", - 0, - 10, - null, - ImmutableList.of( - new AndFilterInput(ImmutableList.of( - new FacetFilterInput("baseField", "baseTest", ImmutableList.of("baseTest"), false, FilterOperator.EQUAL) - )) - ), - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("baseField.keyword") + 
.setValue("baseTest") + .setCondition(Condition.EQUAL) + .setNegated(false) + .setValues( + new StringArray(ImmutableList.of("baseTest")))))))); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME), + "", + SearchUtils.combineFilters(baseFilter, viewFilter), + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + ImmutableList.of(EntityType.DATASET), + "", + 0, + 10, + null, + ImmutableList.of( + new AndFilterInput( + ImmutableList.of( + new FacetFilterInput( + "baseField", + "baseTest", + ImmutableList.of("baseTest"), + false, + FilterOperator.EQUAL)))), + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -208,74 +216,66 @@ public static void testApplyViewBaseFilter() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME), // Verify that merged entity types were used. "", SearchUtils.combineFilters(baseFilter, viewFilter), // Verify that merged filters were used. 
0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewNullBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - null, - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new DataHubViewDefinition() + .setEntityTypes( + new 
StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + null, "", 0, 10, null, null, TEST_VIEW_URN.toString(), null, null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -285,74 +285,75 @@ public static void testApplyViewNullBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 
0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewEmptyBaseEntityTypes() throws Exception { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); DataHubViewInfo info = new DataHubViewInfo(); info.setName("test"); info.setType(DataHubViewType.GLOBAL); info.setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); info.setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)); - info.setDefinition(new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(viewFilter) - ); - - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - info - ); - - EntityClient mockClient = initMockEntityClient( - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), - "", - viewFilter, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + info.setDefinition( + new 
DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter(viewFilter)); + + ViewService mockService = initMockViewService(TEST_VIEW_URN, info); + + EntityClient mockClient = + initMockEntityClient( + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), + "", + viewFilter, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -362,56 +363,55 @@ public static void testApplyViewEmptyBaseEntityTypes() throws Exception { verifyMockEntityClient( mockClient, - ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME), // Verify that view entity types were honored. "", viewFilter, // Verify that merged filters were used. 0, - 10 - ); + 10); - verifyMockViewService( - mockService, - TEST_VIEW_URN - ); + verifyMockViewService(mockService, TEST_VIEW_URN); } @Test public static void testApplyViewViewDoesNotExist() throws Exception { // When a view does not exist, the endpoint should WARN and not apply the view. 
- ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); - - List searchEntityTypes = SEARCHABLE_ENTITY_TYPES.stream().map(EntityTypeMapper::getName).collect(Collectors.toList()); - - EntityClient mockClient = initMockEntityClient( - searchEntityTypes, - "", - null, - 0, - 10, - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); + + List searchEntityTypes = + SEARCHABLE_ENTITY_TYPES.stream() + .map(EntityTypeMapper::getName) + .collect(Collectors.toList()); + + EntityClient mockClient = + initMockEntityClient( + searchEntityTypes, + "", + null, + 0, + 10, + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -419,49 +419,41 @@ public static void testApplyViewViewDoesNotExist() throws Exception { resolver.get(mockEnv).get(); - verifyMockEntityClient( - mockClient, - searchEntityTypes, - "", - null, - 0, - 10 - ); + 
verifyMockEntityClient(mockClient, searchEntityTypes, "", null, 0, 10); } @Test public static void testApplyViewErrorFetchingView() throws Exception { // When a view cannot be successfully resolved, the endpoint show THROW. - ViewService mockService = initMockViewService( - TEST_VIEW_URN, - null - ); + ViewService mockService = initMockViewService(TEST_VIEW_URN, null); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.searchAcrossEntities( - Mockito.anyList(), - Mockito.anyString(), - Mockito.any(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenThrow(new RemoteInvocationException()); - - final SearchAcrossEntitiesResolver resolver = new SearchAcrossEntitiesResolver(mockClient, mockService); - final SearchAcrossEntitiesInput testInput = new SearchAcrossEntitiesInput( - Collections.emptyList(), // Empty Entity Types - "", - 0, - 10, - null, - null, - TEST_VIEW_URN.toString(), - null, - null - ); + Mockito.when( + mockClient.searchAcrossEntities( + Mockito.anyList(), + Mockito.anyString(), + Mockito.any(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenThrow(new RemoteInvocationException()); + + final SearchAcrossEntitiesResolver resolver = + new SearchAcrossEntitiesResolver(mockClient, mockService); + final SearchAcrossEntitiesInput testInput = + new SearchAcrossEntitiesInput( + Collections.emptyList(), // Empty Entity Types + "", + 0, + 10, + null, + null, + TEST_VIEW_URN.toString(), + null, + null); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); @@ -470,17 +462,10 @@ public static void testApplyViewErrorFetchingView() throws Exception { Assert.assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); 
} - private static ViewService initMockViewService( - Urn viewUrn, - DataHubViewInfo viewInfo - ) { + private static ViewService initMockViewService(Urn viewUrn, DataHubViewInfo viewInfo) { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - )).thenReturn( - viewInfo - ); + Mockito.when(service.getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class))) + .thenReturn(viewInfo); return service; } @@ -490,21 +475,20 @@ private static EntityClient initMockEntityClient( Filter filter, int start, int limit, - SearchResult result - ) throws Exception { + SearchResult result) + throws Exception { EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.searchAcrossEntities( - Mockito.eq(entityTypes), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.eq(null), - Mockito.eq(null), - Mockito.any(Authentication.class) - )).thenReturn( - result - ); + Mockito.when( + client.searchAcrossEntities( + Mockito.eq(entityTypes), + Mockito.eq(query), + Mockito.eq(filter), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.eq(null), + Mockito.eq(null), + Mockito.any(Authentication.class))) + .thenReturn(result); return client; } @@ -514,8 +498,8 @@ private static void verifyMockEntityClient( String query, Filter filter, int start, - int limit - ) throws Exception { + int limit) + throws Exception { Mockito.verify(mockClient, Mockito.times(1)) .searchAcrossEntities( Mockito.eq(entityTypes), @@ -525,21 +509,13 @@ private static void verifyMockEntityClient( Mockito.eq(limit), Mockito.eq(null), Mockito.eq(null), - Mockito.any(Authentication.class) - ); + Mockito.any(Authentication.class)); } - private static void verifyMockViewService( - ViewService mockService, - Urn viewUrn - ) { + private static void verifyMockViewService(ViewService mockService, Urn viewUrn) { Mockito.verify(mockService, Mockito.times(1)) - 
.getViewInfo( - Mockito.eq(viewUrn), - Mockito.any(Authentication.class) - ); + .getViewInfo(Mockito.eq(viewUrn), Mockito.any(Authentication.class)); } - private SearchAcrossEntitiesResolverTest() { } - + private SearchAcrossEntitiesResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java index c68b621e6921f2..273f7156c12a8b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.UrnArrayArray; import com.linkedin.common.urn.UrnUtils; @@ -23,15 +27,12 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - // Initialize this class in the style of SearchAcrossEntitiesResolverTest.java public class SearchAcrossLineageResolverTest { - private static final String SOURCE_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; - private static final String TARGET_URN_STRING = "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; + private static final String SOURCE_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,bar,PROD)"; + private static final String TARGET_URN_STRING = + "urn:li:dataset:(urn:li:dataPlatform:foo,baz,PROD)"; private static final String QUERY = ""; private static final int START = 0; private 
static final int COUNT = 10; @@ -87,19 +88,20 @@ public void testSearchAcrossLineage() throws Exception { lineageSearchResult.setEntities(new LineageSearchEntityArray(lineageSearchEntity)); when(_entityClient.searchAcrossLineage( - eq(UrnUtils.getUrn(SOURCE_URN_STRING)), - eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), - anyList(), - eq(QUERY), - eq(null), - any(), - eq(null), - eq(START), - eq(COUNT), - eq(START_TIMESTAMP_MILLIS), - eq(END_TIMESTAMP_MILLIS), - eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), - eq(_authentication))).thenReturn(lineageSearchResult); + eq(UrnUtils.getUrn(SOURCE_URN_STRING)), + eq(com.linkedin.metadata.graph.LineageDirection.DOWNSTREAM), + anyList(), + eq(QUERY), + eq(null), + any(), + eq(null), + eq(START), + eq(COUNT), + eq(START_TIMESTAMP_MILLIS), + eq(END_TIMESTAMP_MILLIS), + eq(new SearchFlags().setFulltext(true).setSkipHighlighting(true)), + eq(_authentication))) + .thenReturn(lineageSearchResult); final SearchAcrossLineageResults results = _resolver.get(_dataFetchingEnvironment).join(); assertEquals(results.getCount(), 10); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java index 6ba8b3cefe5046..24724cb8e23ad3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolverTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.search; +import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; @@ -16,134 +18,107 @@ import org.mockito.Mockito; import org.testng.annotations.Test; 
-import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; - - public class SearchResolverTest { - @Test - public void testDefaultSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "", - null, - null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(true) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - @Test - public void testOverrideSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchFlags inputSearchFlags = new SearchFlags(); - inputSearchFlags.setFulltext(false); - inputSearchFlags.setSkipAggregates(true); - inputSearchFlags.setSkipHighlighting(true); - inputSearchFlags.setMaxAggValues(10); - inputSearchFlags.setSkipCache(true); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "", - 1, - 11, - null, - null, - inputSearchFlags - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - 
verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "", - null, - null, - 1, - 11, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(false) - .setSkipAggregates(true) - .setSkipHighlighting(true) - .setMaxAggValues(10) - .setSkipCache(true) - ); - } - - @Test - public void testNonWildCardSearchFlags() throws Exception { - EntityClient mockClient = initMockSearchEntityClient(); - final SearchResolver resolver = new SearchResolver(mockClient); - - final SearchInput testInput = new SearchInput( - EntityType.DATASET, - "not a wildcard", - 0, - 10, - null, - null, - null - ); - DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - QueryContext mockContext = getMockAllowContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); - Mockito.when(mockEnv.getContext()).thenReturn(mockContext); - - resolver.get(mockEnv).get(); - - verifyMockSearchEntityClient( - mockClient, - Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. - "not a wildcard", - null, // Verify that view filter was used. 
- null, - 0, - 10, - new com.linkedin.metadata.query.SearchFlags() - .setFulltext(true) - .setSkipAggregates(false) - .setSkipHighlighting(false) // empty/wildcard - .setMaxAggValues(20) - .setSkipCache(false) - ); - } - - private EntityClient initMockSearchEntityClient() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - Mockito.when(client.search( + @Test + public void testDefaultSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = new SearchInput(EntityType.DATASET, "", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(true) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + @Test + public void testOverrideSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchFlags inputSearchFlags = new SearchFlags(); + inputSearchFlags.setFulltext(false); + inputSearchFlags.setSkipAggregates(true); + inputSearchFlags.setSkipHighlighting(true); + inputSearchFlags.setMaxAggValues(10); + inputSearchFlags.setSkipCache(true); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "", 1, 11, null, null, inputSearchFlags); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. 
+ "", + null, + null, + 1, + 11, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(false) + .setSkipAggregates(true) + .setSkipHighlighting(true) + .setMaxAggValues(10) + .setSkipCache(true)); + } + + @Test + public void testNonWildCardSearchFlags() throws Exception { + EntityClient mockClient = initMockSearchEntityClient(); + final SearchResolver resolver = new SearchResolver(mockClient); + + final SearchInput testInput = + new SearchInput(EntityType.DATASET, "not a wildcard", 0, 10, null, null, null); + DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); + QueryContext mockContext = getMockAllowContext(); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(testInput); + Mockito.when(mockEnv.getContext()).thenReturn(mockContext); + + resolver.get(mockEnv).get(); + + verifyMockSearchEntityClient( + mockClient, + Constants.DATASET_ENTITY_NAME, // Verify that merged entity types were used. + "not a wildcard", + null, // Verify that view filter was used. 
+ null, + 0, + 10, + new com.linkedin.metadata.query.SearchFlags() + .setFulltext(true) + .setSkipAggregates(false) + .setSkipHighlighting(false) // empty/wildcard + .setMaxAggValues(20) + .setSkipCache(false)); + } + + private EntityClient initMockSearchEntityClient() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + Mockito.when( + client.search( Mockito.anyString(), Mockito.anyString(), Mockito.any(), @@ -151,40 +126,38 @@ private EntityClient initMockSearchEntityClient() throws Exception { Mockito.anyInt(), Mockito.anyInt(), Mockito.any(Authentication.class), - Mockito.any() - )).thenReturn( - new SearchResult() - .setEntities(new SearchEntityArray()) - .setNumEntities(0) - .setFrom(0) - .setPageSize(0) - .setMetadata(new SearchResultMetadata()) - ); - return client; - } - - private void verifyMockSearchEntityClient( - EntityClient mockClient, - String entityName, - String query, - Filter filter, - SortCriterion sortCriterion, - int start, - int limit, - com.linkedin.metadata.query.SearchFlags searchFlags - ) throws Exception { - Mockito.verify(mockClient, Mockito.times(1)).search( - Mockito.eq(entityName), - Mockito.eq(query), - Mockito.eq(filter), - Mockito.eq(sortCriterion), - Mockito.eq(start), - Mockito.eq(limit), - Mockito.any(Authentication.class), - Mockito.eq(searchFlags) - ); - } - - private SearchResolverTest() { - } + Mockito.any())) + .thenReturn( + new SearchResult() + .setEntities(new SearchEntityArray()) + .setNumEntities(0) + .setFrom(0) + .setPageSize(0) + .setMetadata(new SearchResultMetadata())); + return client; + } + + private void verifyMockSearchEntityClient( + EntityClient mockClient, + String entityName, + String query, + Filter filter, + SortCriterion sortCriterion, + int start, + int limit, + com.linkedin.metadata.query.SearchFlags searchFlags) + throws Exception { + Mockito.verify(mockClient, Mockito.times(1)) + .search( + Mockito.eq(entityName), + Mockito.eq(query), + Mockito.eq(filter), + 
Mockito.eq(sortCriterion), + Mockito.eq(start), + Mockito.eq(limit), + Mockito.any(Authentication.class), + Mockito.eq(searchFlags)); + } + + private SearchResolverTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java index b35f7a77b209c9..8f23f0a6245766 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/search/SearchUtilsTest.java @@ -17,16 +17,18 @@ public class SearchUtilsTest { @Test public static void testApplyViewToFilterNullBaseFilter() { - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(null, viewFilter); Assert.assertEquals(viewFilter, result); @@ -34,275 +36,272 @@ public static void testApplyViewToFilterNullBaseFilter() { @Test public static void testApplyViewToFilterComplexBaseFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - 
), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field") + .setValue("test") + .setValues(new StringArray(ImmutableList.of("test")))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new 
StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("field") - .setValue("test") - .setValues(new StringArray(ImmutableList.of("test"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("field") + .setValue("test") + .setValues( + new StringArray(ImmutableList.of("test"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterComplexViewFilter() { - Filter baseFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new 
CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))) - )) - ) - ))); + Filter baseFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues( + new StringArray(ImmutableList.of("test2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues( + new StringArray(ImmutableList.of("test4"))))))))); - Filter viewFilter = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new 
StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter viewFilter = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + ImmutableList.of("viewTest4"))))))))); Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - 
.setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ), - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field3") - .setValue("test3") - .setValues(new StringArray(ImmutableList.of("test3"))), - new Criterion() - .setField("field4") - .setValue("test4") - .setValues(new StringArray(ImmutableList.of("test4"))), - new Criterion() - .setField("viewField3") - .setValue("viewTest3") - .setValues(new StringArray(ImmutableList.of("viewTest3"))), - new Criterion() - .setField("viewField4") - .setValue("viewTest4") - .setValues(new StringArray(ImmutableList.of("viewTest4"))) - )) - ) - ))); + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + 
.setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray(ImmutableList.of("viewTest4")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray(ImmutableList.of("viewTest2")))))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field3") + .setValue("test3") + .setValues(new StringArray(ImmutableList.of("test3"))), + new Criterion() + .setField("field4") + .setValue("test4") + .setValues(new StringArray(ImmutableList.of("test4"))), + new Criterion() + .setField("viewField3") + .setValue("viewTest3") + .setValues( + new StringArray(ImmutableList.of("viewTest3"))), + new Criterion() + .setField("viewField4") + .setValue("viewTest4") + .setValues( + new StringArray( + 
ImmutableList.of("viewTest4"))))))))); Assert.assertEquals(expectedResult, result); } @Test public static void testApplyViewToFilterV1Filter() { - Filter baseFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("field1") - .setValue("test1") - .setValues(new StringArray(ImmutableList.of("test1"))), - new Criterion() - .setField("field2") - .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))) - )) - ); - - Filter viewFilter = new Filter() - .setCriteria( - new CriterionArray(ImmutableList.of( - new Criterion() - .setField("viewField1") - .setValue("viewTest1") - .setValues(new StringArray(ImmutableList.of("viewTest1"))), - new Criterion() - .setField("viewField2") - .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ); - - Filter result = SearchUtils.combineFilters(baseFilter, viewFilter); - - Filter expectedResult = new Filter() - .setOr(new ConjunctiveCriterionArray( - ImmutableList.of( - new ConjunctiveCriterion().setAnd( - new CriterionArray(ImmutableList.of( + Filter baseFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("field1") .setValue("test1") @@ -310,7 +309,13 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("field2") .setValue("test2") - .setValues(new StringArray(ImmutableList.of("test2"))), + .setValues(new StringArray(ImmutableList.of("test2")))))); + + Filter viewFilter = + new Filter() + .setCriteria( + new CriterionArray( + ImmutableList.of( new Criterion() .setField("viewField1") .setValue("viewTest1") @@ -318,10 +323,38 @@ public static void testApplyViewToFilterV1Filter() { new Criterion() .setField("viewField2") .setValue("viewTest2") - .setValues(new StringArray(ImmutableList.of("viewTest2"))) - )) - ) - ))); + .setValues(new StringArray(ImmutableList.of("viewTest2")))))); + + Filter result = SearchUtils.combineFilters(baseFilter, 
viewFilter); + + Filter expectedResult = + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setField("field1") + .setValue("test1") + .setValues(new StringArray(ImmutableList.of("test1"))), + new Criterion() + .setField("field2") + .setValue("test2") + .setValues(new StringArray(ImmutableList.of("test2"))), + new Criterion() + .setField("viewField1") + .setValue("viewTest1") + .setValues( + new StringArray(ImmutableList.of("viewTest1"))), + new Criterion() + .setField("viewField2") + .setValue("viewTest2") + .setValues( + new StringArray( + ImmutableList.of("viewTest2"))))))))); Assert.assertEquals(expectedResult, result); } @@ -329,24 +362,17 @@ public static void testApplyViewToFilterV1Filter() { @Test public static void testApplyViewToEntityTypes() { - List baseEntityTypes = ImmutableList.of( - Constants.CHART_ENTITY_NAME, - Constants.DATASET_ENTITY_NAME - ); + List baseEntityTypes = + ImmutableList.of(Constants.CHART_ENTITY_NAME, Constants.DATASET_ENTITY_NAME); - List viewEntityTypes = ImmutableList.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME - ); + List viewEntityTypes = + ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME); final List result = SearchUtils.intersectEntityTypes(baseEntityTypes, viewEntityTypes); - final List expectedResult = ImmutableList.of( - Constants.DATASET_ENTITY_NAME - ); + final List expectedResult = ImmutableList.of(Constants.DATASET_ENTITY_NAME); Assert.assertEquals(expectedResult, result); } - private SearchUtilsTest() { } - + private SearchUtilsTest() {} } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java index 
905e913fba909c..553a2c85a7ae2e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/user/UpdateCorpUserViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -15,29 +18,24 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateCorpUserViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = new UpdateCorpUserViewsSettingsInput( - TEST_URN.toString() - ); - private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = new UpdateCorpUserViewsSettingsInput( - null - ); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT = + new UpdateCorpUserViewsSettingsInput(TEST_URN.toString()); + private static final UpdateCorpUserViewsSettingsInput TEST_VIEWS_INPUT_NULL = + new UpdateCorpUserViewsSettingsInput(null); @Test public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new 
CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -46,25 +44,28 @@ public void testGetSuccessViewSettingsNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessViewSettingsExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new 
UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -73,26 +74,28 @@ public void testGetSuccessViewSettingsExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews(new CorpUserViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } - @Test public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - ))) - ); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + SettingsService mockService = + initSettingsService( + TEST_USER_URN, + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView")))); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); @@ -101,22 +104,26 @@ public void testGetSuccessViewSettingsRemoveDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.eq(new CorpUserSettings() - .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) - .setViews(new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.eq( + new CorpUserSettings() + .setAppearance(new CorpUserAppearanceSettings().setShowSimplifiedHomepage(true)) + .setViews( + new CorpUserViewsSettings().setDefaultView(null, SetMode.IGNORE_NULL))), + Mockito.any(Authentication.class)); } @Test public void testGetCorpUserSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getCorpUserSettings(Mockito.eq(TEST_USER_URN), Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -126,19 +133,18 @@ public void testGetCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateCorpUserSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - TEST_USER_URN, - null 
- ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateCorpUserSettings( - Mockito.eq(TEST_USER_URN), - Mockito.any(CorpUserSettings.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(TEST_USER_URN, null); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateCorpUserSettings( + Mockito.eq(TEST_USER_URN), + Mockito.any(CorpUserSettings.class), + Mockito.any(Authentication.class)); - UpdateCorpUserViewsSettingsResolver resolver = new UpdateCorpUserViewsSettingsResolver(mockService); + UpdateCorpUserViewsSettingsResolver resolver = + new UpdateCorpUserViewsSettingsResolver(mockService); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); @@ -148,17 +154,13 @@ public void testUpdateCorpUserSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - Urn user, - CorpUserSettings existingSettings - ) { + private static SettingsService initSettingsService(Urn user, CorpUserSettings existingSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getCorpUserSettings( - Mockito.eq(user), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getCorpUserSettings(Mockito.eq(user), Mockito.any(Authentication.class))) .thenReturn(existingSettings); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java index 4e2283735b8c97..8f96eae9480f85 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/GlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,10 +17,6 @@ import org.testng.Assert; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class GlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -25,9 +24,7 @@ public class GlobalViewsSettingsResolverTest { @Test public void testGetSuccessNullSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -42,9 +39,7 @@ public void testGetSuccessNullSettings() throws Exception { @Test public void testGetSuccessEmptySettings() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -53,16 +48,13 @@ public void testGetSuccessEmptySettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertNull( - result.getDefaultView() - ); + Assert.assertNull(result.getDefaultView()); } @Test public void testGetSuccessExistingSettings() throws Exception { - SettingsService mockService = 
initSettingsService( - new GlobalViewsSettings().setDefaultView(TEST_URN) - ); + SettingsService mockService = + initSettingsService(new GlobalViewsSettings().setDefaultView(TEST_URN)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(TEST_USER_URN.toString()); @@ -71,17 +63,15 @@ public void testGetSuccessExistingSettings() throws Exception { com.linkedin.datahub.graphql.generated.GlobalViewsSettings result = resolver.get(mockEnv).get(); - Assert.assertEquals( - result.getDefaultView(), - TEST_URN.toString() - ); + Assert.assertEquals(result.getDefaultView(), TEST_URN.toString()); } @Test public void testGetException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); GlobalViewsSettingsResolver resolver = new GlobalViewsSettingsResolver(mockService); @@ -94,9 +84,7 @@ public void testGetException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -107,15 +95,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - 
Mockito.any(Authentication.class))) + Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class))) .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java index 9ea3c223559cd2..c0cc09052176d2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/settings/view/UpdateGlobalViewsSettingsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.settings.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; @@ -14,22 +17,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateGlobalViewsSettingsResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); - private static final UpdateGlobalViewsSettingsInput TEST_INPUT = new UpdateGlobalViewsSettingsInput( - TEST_URN.toString() - ); + private static final UpdateGlobalViewsSettingsInput TEST_INPUT = + new UpdateGlobalViewsSettingsInput(TEST_URN.toString()); @Test public void testGetSuccessNoExistingSettings() throws Exception { - SettingsService mockService = initSettingsService( - null - ); + SettingsService mockService = initSettingsService(null); UpdateGlobalViewsSettingsResolver resolver = new 
UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -39,16 +35,17 @@ public void testGetSuccessNoExistingSettings() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessNoDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -58,18 +55,20 @@ public void testGetSuccessNoDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetSuccessExistingDefaultView() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings().setDefaultView(UrnUtils.getUrn( - "urn:li:dataHubView:otherView" - )) - ); + SettingsService mockService = + initSettingsService( + new GlobalViewsSettings() + .setDefaultView(UrnUtils.getUrn("urn:li:dataHubView:otherView"))); UpdateGlobalViewsSettingsResolver 
resolver = new UpdateGlobalViewsSettingsResolver(mockService); QueryContext mockContext = getMockAllowContext(); @@ -79,16 +78,20 @@ public void testGetSuccessExistingDefaultView() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).updateGlobalSettings( - Mockito.eq(new GlobalSettingsInfo().setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), - Mockito.any(Authentication.class)); + Mockito.verify(mockService, Mockito.times(1)) + .updateGlobalSettings( + Mockito.eq( + new GlobalSettingsInfo() + .setViews(new GlobalViewsSettings().setDefaultView(TEST_URN))), + Mockito.any(Authentication.class)); } @Test public void testGetGlobalViewsSettingsException() throws Exception { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -100,15 +103,13 @@ public void testGetGlobalViewsSettingsException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - @Test public void testUpdateGlobalViewsSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); - Mockito.doThrow(RuntimeException.class).when(mockService).updateGlobalSettings( - Mockito.any(GlobalSettingsInfo.class), - Mockito.any(Authentication.class)); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateGlobalSettings( + Mockito.any(GlobalSettingsInfo.class), Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -122,11 
+123,13 @@ public void testUpdateGlobalViewsSettingsException() throws Exception { @Test public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { - SettingsService mockService = initSettingsService( - null // Should never be null. - ); - Mockito.doThrow(RuntimeException.class).when(mockService).getGlobalSettings( - Mockito.any(Authentication.class)); + SettingsService mockService = + initSettingsService( + null // Should never be null. + ); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .getGlobalSettings(Mockito.any(Authentication.class)); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); @@ -140,9 +143,7 @@ public void testGetGlobalViewsSettingsNoSettingsException() throws Exception { @Test public void testGetUnauthorized() throws Exception { - SettingsService mockService = initSettingsService( - new GlobalViewsSettings() - ); + SettingsService mockService = initSettingsService(new GlobalViewsSettings()); UpdateGlobalViewsSettingsResolver resolver = new UpdateGlobalViewsSettingsResolver(mockService); // Execute resolver @@ -154,15 +155,12 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } - private static SettingsService initSettingsService( - GlobalViewsSettings existingViewSettings - ) { + private static SettingsService initSettingsService(GlobalViewsSettings existingViewSettings) { SettingsService mockService = Mockito.mock(SettingsService.class); - Mockito.when(mockService.getGlobalSettings( - Mockito.any(Authentication.class))) - .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); + Mockito.when(mockService.getGlobalSettings(Mockito.any(Authentication.class))) + .thenReturn(new GlobalSettingsInfo().setViews(existingViewSettings, SetMode.IGNORE_NULL)); return mockService; } -} \ No newline at end of file +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java index 8c4445452c5647..db3e9afab7249c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchGetStepStatesResolverTest.java @@ -1,5 +1,10 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -21,12 +26,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchGetStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final long TIME = 123L; @@ -35,7 +34,8 @@ public class BatchGetStepStatesResolverTest { private static final String SECOND_STEP_STATE_ID = "2"; private static final Urn FIRST_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:1"); private static final Urn SECOND_STEP_STATE_URN = UrnUtils.getUrn("urn:li:dataHubStepState:2"); - private static final Set ASPECTS = ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME); + private static final Set ASPECTS = + ImmutableSet.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME); private EntityClient _entityClient; private BatchGetStepStatesResolver _resolver; private DataFetchingEnvironment 
_dataFetchingEnvironment; @@ -68,15 +68,17 @@ public void testBatchGetStepStatesFirstStepCompleted() throws Exception { new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); final Set urns = ImmutableSet.of(FIRST_STEP_STATE_URN); - final Map firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - firstStepStateProperties); - final Map entityResponseMap = ImmutableMap.of(FIRST_STEP_STATE_URN, - TestUtils.buildEntityResponse(firstAspectMap)); + final Map firstAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties); + final Map entityResponseMap = + ImmutableMap.of(FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap)); - when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) + when(_entityClient.batchGetV2( + eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) .thenReturn(entityResponseMap); - final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchGetStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, actualBatchResult.getResults().size()); } @@ -100,18 +102,21 @@ public void testBatchGetStepStatesBothStepsCompleted() throws Exception { new DataHubStepStateProperties().setLastModified(AUDIT_STAMP); final Set urns = ImmutableSet.of(FIRST_STEP_STATE_URN, SECOND_STEP_STATE_URN); - final Map firstAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - firstStepStateProperties); - final Map secondAspectMap = ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, - secondStepStateProperties); - final Map entityResponseMap = ImmutableMap.of( - FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap), - SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap)); - - 
when(_entityClient.batchGetV2(eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) + final Map firstAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, firstStepStateProperties); + final Map secondAspectMap = + ImmutableMap.of(DATAHUB_STEP_STATE_PROPERTIES_ASPECT_NAME, secondStepStateProperties); + final Map entityResponseMap = + ImmutableMap.of( + FIRST_STEP_STATE_URN, TestUtils.buildEntityResponse(firstAspectMap), + SECOND_STEP_STATE_URN, TestUtils.buildEntityResponse(secondAspectMap)); + + when(_entityClient.batchGetV2( + eq(DATAHUB_STEP_STATE_ENTITY_NAME), eq(urns), eq(ASPECTS), eq(_authentication))) .thenReturn(entityResponseMap); - final BatchGetStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchGetStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(2, actualBatchResult.getResults().size()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java index 5f20a11f15ac66..b457498cc547a5 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/step/BatchUpdateStepStatesResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.step; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; import com.datahub.authentication.Authentication; @@ -16,11 +20,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static 
com.linkedin.datahub.graphql.TestUtils.*; -import static org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class BatchUpdateStepStatesResolverTest { private static final Urn ACTOR_URN = UrnUtils.getUrn("urn:li:corpuser:test"); private static final String FIRST_STEP_STATE_ID = "1"; @@ -52,7 +51,8 @@ public void testBatchUpdateStepStatesFirstStepCompleted() throws Exception { input.setStates(ImmutableList.of(firstInput)); when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); - final BatchUpdateStepStatesResult actualBatchResult = _resolver.get(_dataFetchingEnvironment).join(); + final BatchUpdateStepStatesResult actualBatchResult = + _resolver.get(_dataFetchingEnvironment).join(); assertNotNull(actualBatchResult); assertEquals(1, actualBatchResult.getResults().size()); verify(_entityClient, times(1)).ingestProposal(any(), eq(_authentication), eq(false)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 268d6a6bc4268d..340802cde467b8 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -20,14 +24,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class AddTagsResolverTest { - private 
static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -35,11 +35,12 @@ public class AddTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); @@ -50,46 +51,51 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new 
TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(originalTags); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) + .thenReturn(originalTags); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); 
Mockito.when(mockService.exists(Urn.createFromString(TEST_TAG_1_URN))).thenReturn(true); @@ -100,41 +106,43 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, proposal); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, 
Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -145,9 +153,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -159,10 +166,11 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -173,9 +181,8 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = 
Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -191,9 +198,8 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -206,21 +212,21 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTagsInput input = new AddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), TEST_ENTITY_URN, null, null); + AddTagsInput input = + new AddTagsInput(ImmutableList.of(TEST_TAG_1_URN), TEST_ENTITY_URN, null, null); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index 651b89359c83fa..71354627b11452 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -18,21 +22,17 @@ import com.linkedin.metadata.entity.ebean.transactions.AspectsBatchImpl; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; - import java.util.List; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchAddTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + 
"urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -40,19 +40,20 @@ public class BatchAddTagsResolverTest { public void testGetSuccessNoExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); @@ -64,55 +65,63 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetSuccessExistingTags() throws Exception { - GlobalTags originalTags = new 
GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)))) - ); + GlobalTags originalTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTags); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -126,45 +135,49 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertTrue(resolver.get(mockEnv).get()); - final GlobalTags newTags = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, newTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, newTags); + final GlobalTags newTags = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, newTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, newTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TAG_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TAG_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - 
Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -175,33 +188,36 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -213,19 +229,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -236,42 +254,47 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - 
new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTagsInput input = new BatchAddTagsInput(ImmutableList.of( - TEST_TAG_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTagsInput input = + new BatchAddTagsInput( + ImmutableList.of(TEST_TAG_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index f302540eba9048..8cd10afee293ea 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlobalTags; @@ -26,15 +30,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class BatchRemoveTagsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @@ -42,15 +43,17 @@ public class BatchRemoveTagsResolverTest { public void testGetSuccessNoExistingTags() throws 
Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -64,22 +67,25 @@ public void testGetSuccessNoExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = 
MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); proposal2.setEntityType(Constants.DATASET_ENTITY_NAME); proposal2.setAspectName(Constants.GLOBAL_TAGS_ASPECT_NAME); @@ -93,25 +99,33 @@ public void testGetSuccessNoExistingTags() throws Exception { public void testGetSuccessExistingTags() throws Exception { EntityService mockService = getMockEntityService(); - final GlobalTags oldTags1 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), - new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags1 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)), + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags1); - final GlobalTags oldTags2 = new GlobalTags().setTags(new TagAssociationArray(ImmutableList.of( - new 
TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + final GlobalTags oldTags2 = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of( + new TagAssociation().setTag(TagUrn.createFromString(TEST_TAG_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTags2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -125,22 +139,25 @@ public void testGetSuccessExistingTags() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - final GlobalTags emptyTags = new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); + final GlobalTags emptyTags = + new GlobalTags().setTags(new TagAssociationArray(Collections.emptyList())); - final MetadataChangeProposal proposal1 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_1), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); - final 
MetadataChangeProposal proposal2 = MutationUtils.buildMetadataChangeProposalWithUrn(Urn.createFromString(TEST_ENTITY_URN_2), - GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal1 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_1), GLOBAL_TAGS_ASPECT_NAME, emptyTags); + final MetadataChangeProposal proposal2 = + MutationUtils.buildMetadataChangeProposalWithUrn( + Urn.createFromString(TEST_ENTITY_URN_2), GLOBAL_TAGS_ASPECT_NAME, emptyTags); verifyIngestProposal(mockService, 1, List.of(proposal1, proposal2)); } @@ -149,15 +166,17 @@ public void testGetSuccessExistingTags() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOBAL_TAGS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -169,19 +188,21 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new 
ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -192,44 +213,49 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + 
Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTagsInput input = new BatchRemoveTagsInput(ImmutableList.of( - TEST_TAG_1_URN, - TEST_TAG_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTagsInput input = + new BatchRemoveTagsInput( + ImmutableList.of(TEST_TAG_1_URN, TEST_TAG_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java index f801daf4f2a3f1..dac7104ca29305 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolverTest.java @@ -1,39 +1,36 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTagInput; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.tag.TagProperties; import com.linkedin.metadata.key.TagKey; import com.linkedin.mxe.MetadataChangeProposal; +import com.linkedin.tag.TagProperties; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; - import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class CreateTagResolverTest { - private static final CreateTagInput TEST_INPUT = new CreateTagInput( - "test-id", - "test-name", - "test-description" - ); + private static final CreateTagInput TEST_INPUT = + new CreateTagInput("test-id", "test-name", "test-description"); @Test public void testGetSuccess() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.ingestProposal(Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) + Mockito.when( + mockClient.ingestProposal( + Mockito.any(MetadataChangeProposal.class), Mockito.any(Authentication.class))) .thenReturn(String.format("urn:li:tag:%s", TEST_INPUT.getId())); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); @@ -50,15 +47,13 
@@ public void testGetSuccess() throws Exception { TagProperties props = new TagProperties(); props.setDescription("test-description"); props.setName("test-name"); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithKey(key, TAG_ENTITY_NAME, - TAG_PROPERTIES_ASPECT_NAME, props); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithKey( + key, TAG_ENTITY_NAME, TAG_PROPERTIES_ASPECT_NAME, props); // Not ideal to match against "any", but we don't know the auto-generated execution request id - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); } @Test @@ -75,9 +70,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -85,10 +79,9 @@ public void testGetEntityClientException() throws Exception { // Create resolver EntityService mockService = getMockEntityService(); EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RuntimeException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RuntimeException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTagResolver resolver = new CreateTagResolver(mockClient, mockService); // Execute resolver @@ -99,4 +92,4 @@ public void testGetEntityClientException() throws 
Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java index b01ac1a9b14ae9..11dfad43d57313 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/DeleteTagResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTagResolverTest { private static final String TEST_URN = "urn:li:tag:test-id"; @@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + 
.deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java index b5bbf0775a8bae..6ae72fcbb72688 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/SetTagColorResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.tag; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -23,11 +27,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static com.linkedin.metadata.Constants.*; -import static org.testng.Assert.*; - - public class SetTagColorResolverTest { private static final String TEST_ENTITY_URN = "urn:li:tag:test-tag"; @@ -41,10 +40,11 @@ public void testGetSuccessExistingProperties() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTagProperties); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -59,19 +59,17 @@ public void 
testGetSuccessExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); resolver.get(mockEnv).get(); - final TagProperties newTagProperties = new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); - final MetadataChangeProposal proposal = MutationUtils.buildMetadataChangeProposalWithUrn(UrnUtils.getUrn(TEST_ENTITY_URN), - TAG_PROPERTIES_ASPECT_NAME, newTagProperties); + final TagProperties newTagProperties = + new TagProperties().setName("Test Tag").setColorHex(TEST_COLOR_HEX); + final MetadataChangeProposal proposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn(TEST_ENTITY_URN), TAG_PROPERTIES_ASPECT_NAME, newTagProperties); - Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(Authentication.class), - Mockito.eq(false) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .ingestProposal(Mockito.eq(proposal), Mockito.any(Authentication.class), Mockito.eq(false)); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_ENTITY_URN))); } @Test @@ -81,10 +79,11 @@ public void testGetFailureNoExistingProperties() throws Exception { EntityService mockService = getMockEntityService(); // Test setting the domain - Mockito.when(mockService.getAspect( - Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), - Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), - Mockito.eq(0))) + Mockito.when( + mockService.getAspect( + Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)), + Mockito.eq(Constants.TAG_PROPERTIES_ASPECT_NAME), + Mockito.eq(0))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -99,9 +98,8 @@ public void testGetFailureNoExistingProperties() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); 
assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -111,21 +109,26 @@ public void testGetFailureTagDoesNotExist() throws Exception { // Test setting the domain final TagProperties oldTagProperties = new TagProperties().setName("Test Tag"); - final EnvelopedAspect oldTagPropertiesAspect = new EnvelopedAspect() - .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) - .setValue(new Aspect(oldTagProperties.data())); - Mockito.when(mockClient.batchGetV2( - Mockito.eq(Constants.TAG_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), - Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(Urn.createFromString(TEST_ENTITY_URN), - new EntityResponse() - .setEntityName(Constants.TAG_ENTITY_NAME) - .setUrn(Urn.createFromString(TEST_ENTITY_URN)) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.TAG_PROPERTIES_ASPECT_NAME, - oldTagPropertiesAspect))))); + final EnvelopedAspect oldTagPropertiesAspect = + new EnvelopedAspect() + .setName(Constants.TAG_PROPERTIES_ASPECT_NAME) + .setValue(new Aspect(oldTagProperties.data())); + Mockito.when( + mockClient.batchGetV2( + Mockito.eq(Constants.TAG_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(Urn.createFromString(TEST_ENTITY_URN)))), + Mockito.eq(ImmutableSet.of(Constants.TAG_PROPERTIES_ASPECT_NAME)), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + Urn.createFromString(TEST_ENTITY_URN), + new EntityResponse() + .setEntityName(Constants.TAG_ENTITY_NAME) + .setUrn(Urn.createFromString(TEST_ENTITY_URN)) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.TAG_PROPERTIES_ASPECT_NAME, 
oldTagPropertiesAspect))))); EntityService mockService = getMockEntityService(); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -139,9 +142,8 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when(mockEnv.getArgument(Mockito.eq("colorHex"))).thenReturn(TEST_COLOR_HEX); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test @@ -159,18 +161,18 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); - SetTagColorResolver resolver = new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); + SetTagColorResolver resolver = + new SetTagColorResolver(mockClient, Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -181,4 +183,4 @@ public void testGetEntityClientException() throws Exception { 
assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 213d21fd35dc1e..cb827a42333b23 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -19,13 +22,10 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class AddTermsResolverTest { - private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -33,11 +33,12 @@ public class AddTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) - .thenReturn(null); + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + 
Mockito.eq(0L))) + .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_TERM_1_URN))).thenReturn(true); @@ -48,41 +49,42 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new 
GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)))) - ); + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -94,37 +96,34 @@ public void testGetSuccessExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.eq(false) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_1_URN)) - ); - - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .ingestProposal( + 
Mockito.any(AspectsBatchImpl.class), Mockito.any(AuditStamp.class), Mockito.eq(false)); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_1_URN))); + + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_TERM_2_URN))); } @Test public void testGetFailureTermDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(true); @@ -135,26 +134,28 @@ public void testGetFailureTermDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - 
Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN))).thenReturn(false); @@ -165,16 +166,17 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test @@ -185,38 +187,41 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.verify(mockService, Mockito.times(0)) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); } @Test public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - AddTermsInput input = new AddTermsInput(ImmutableList.of( - TEST_TERM_1_URN - ), TEST_ENTITY_URN, null, null); + AddTermsInput input = + new AddTermsInput(ImmutableList.of(TEST_TERM_1_URN), TEST_ENTITY_URN, null, null); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 8887bb452b478c..7df19fad52689f 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchAddTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_GLOSSARY_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_GLOSSARY_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -35,123 +36,134 @@ public class BatchAddTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - 
Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + 
.exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); } @Test public void testGetSuccessExistingTerms() throws Exception { - GlossaryTerms originalTerms = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN)))) - ); + GlossaryTerms originalTerms = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_GLOSSARY_TERM_1_URN))))); EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(originalTerms); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))).thenReturn(true); + 
Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of( - TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); verifyIngestProposal(mockService, 1); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))); - Mockito.verify(mockService, Mockito.times(1)).exists( - Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN)) - ); + Mockito.verify(mockService, Mockito.times(1)) + .exists(Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_2_URN))); } @Test public void testGetFailureTagDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(false); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(false); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -163,31 +175,35 @@ public void testGetFailureTagDoesNotExist() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + 
Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_2))).thenReturn(true); - Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))).thenReturn(true); + Mockito.when(mockService.exists(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN))) + .thenReturn(true); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -203,11 +219,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, - TEST_GLOSSARY_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN, TEST_GLOSSARY_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new 
ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -220,21 +237,25 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchAddTermsInput input = new BatchAddTermsInput(ImmutableList.of(TEST_GLOSSARY_TERM_1_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); + BatchAddTermsInput input = + new BatchAddTermsInput( + ImmutableList.of(TEST_GLOSSARY_TERM_1_URN), + ImmutableList.of(new ResourceRefInput(TEST_ENTITY_URN_1, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index 995a4acb8a4676..659ce40542a9cf 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.term; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermAssociation; @@ -20,14 +23,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class BatchRemoveTermsResolverTest { - private static final String TEST_ENTITY_URN_1 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; - private static final String TEST_ENTITY_URN_2 = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; + private static final String TEST_ENTITY_URN_1 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_ENTITY_URN_2 = + "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test-2,PROD)"; private static final String TEST_TERM_1_URN = "urn:li:glossaryTerm:test-id-1"; private static final String TEST_TERM_2_URN = "urn:li:glossaryTerm:test-id-2"; @@ -35,15 +36,17 @@ public class BatchRemoveTermsResolverTest { public void testGetSuccessNoExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + 
Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -57,12 +60,12 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -74,25 +77,36 @@ public void testGetSuccessNoExistingTerms() throws Exception { public void testGetSuccessExistingTerms() throws Exception { EntityService mockService = getMockEntityService(); - final GlossaryTerms oldTerms1 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms1 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + 
.setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)), + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_2_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTerms1); - final GlossaryTerms oldTerms2 = new GlossaryTerms().setTerms(new GlossaryTermAssociationArray(ImmutableList.of( - new GlossaryTermAssociation().setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN)) - ))); - - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + final GlossaryTerms oldTerms2 = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation() + .setUrn(GlossaryTermUrn.createFromString(TEST_TERM_1_URN))))); + + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(oldTerms2); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(true); @@ -106,12 +120,12 @@ public void testGetSuccessExistingTerms() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); 
Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); @@ -123,15 +137,17 @@ public void testGetSuccessExistingTerms() throws Exception { public void testGetFailureResourceDoesNotExist() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_1)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); - Mockito.when(mockService.getAspect( - Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), - Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), - Mockito.eq(0L))) + Mockito.when( + mockService.getAspect( + Mockito.eq(UrnUtils.getUrn(TEST_ENTITY_URN_2)), + Mockito.eq(Constants.GLOSSARY_TERMS_ASPECT_NAME), + Mockito.eq(0L))) .thenReturn(null); Mockito.when(mockService.exists(Urn.createFromString(TEST_ENTITY_URN_1))).thenReturn(false); @@ -143,12 +159,12 @@ public void testGetFailureResourceDoesNotExist() throws Exception { // Execute resolver QueryContext mockContext = getMockAllowContext(); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -164,12 +180,12 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); QueryContext mockContext = getMockDenyContext(); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); @@ -182,24 +198,27 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { EntityService mockService = getMockEntityService(); - Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( - Mockito.any(AspectsBatchImpl.class), - Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .ingestProposal( + Mockito.any(AspectsBatchImpl.class), + Mockito.any(AuditStamp.class), + Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockAllowContext(); - BatchRemoveTermsInput input = new BatchRemoveTermsInput(ImmutableList.of( - TEST_TERM_1_URN, - TEST_TERM_2_URN - ), ImmutableList.of( - new ResourceRefInput(TEST_ENTITY_URN_1, null, null), - new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); + BatchRemoveTermsInput input = + new BatchRemoveTermsInput( + 
ImmutableList.of(TEST_TERM_1_URN, TEST_TERM_2_URN), + ImmutableList.of( + new ResourceRefInput(TEST_ENTITY_URN_1, null, null), + new ResourceRefInput(TEST_ENTITY_URN_2, null, null))); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java index 911152d8c97c12..adf4b1c29ad0d2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/CreateTestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.CreateTestInput; @@ -19,19 +22,15 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateTestResolverTest { - private static final CreateTestInput TEST_INPUT = new CreateTestInput( - "test-id", - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final CreateTestInput TEST_INPUT = + new CreateTestInput( + "test-id", + "test-name", + "test-category", + "test-description", + new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -50,16 +49,21 @@ public void testGetSuccess() throws Exception { final TestKey key = new TestKey(); 
key.setId("test-id"); - ArgumentCaptor proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal( + proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityKeyAspect(), GenericRecordUtils.serializeAspect(key)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -80,19 +84,17 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - 
Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.eq(false)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class), Mockito.eq(false)); CreateTestResolver resolver = new CreateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java index 6a449e3c4c4c4b..1c4973871af091 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/DeleteTestResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; @@ -9,10 +12,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; @@ -30,10 +29,9 @@ public void testGetSuccess() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockClient, Mockito.times(1)).deleteEntity( - Mockito.eq(Urn.createFromString(TEST_URN)), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockClient, Mockito.times(1)) + .deleteEntity( + Mockito.eq(Urn.createFromString(TEST_URN)), Mockito.any(Authentication.class)); } @Test @@ -49,8 +47,7 @@ public void testGetUnauthorized() throws Exception { 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).deleteEntity( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .deleteEntity(Mockito.any(), Mockito.any(Authentication.class)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java index 5026e015039e1e..6075425d09c050 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/ListTestsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableSet; import com.linkedin.common.urn.Urn; @@ -18,37 +21,34 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListTestsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("test", "test-id"); - private static final ListTestsInput TEST_INPUT = new ListTestsInput( - 0, 20, null - ); + private static final ListTestsInput TEST_INPUT = new ListTestsInput(0, 20, null); @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.TEST_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq(Collections.emptyMap()), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), 
- Mockito.eq(new SearchFlags().setFulltext(true)))).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + Mockito.when( + mockClient.search( + Mockito.eq(Constants.TEST_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq(Collections.emptyMap()), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListTestsResolver resolver = new ListTestsResolver(mockClient); @@ -75,33 +75,35 @@ public void testGetUnauthorized() throws Exception { // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); QueryContext mockContext = getMockDenyContext(); - Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn( - TEST_INPUT); + Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(TEST_INPUT); Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( - Mockito.any(), - Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.verify(mockClient, Mockito.times(0)) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); } @Test public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( - Mockito.any(), 
- Mockito.eq(""), - Mockito.anyMap(), - Mockito.anyInt(), - Mockito.anyInt(), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true))); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( + Mockito.any(), + Mockito.eq(""), + Mockito.anyMap(), + Mockito.anyInt(), + Mockito.anyInt(), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true))); ListTestsResolver resolver = new ListTestsResolver(mockClient); // Execute resolver @@ -112,4 +114,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java index ae24232bce17cd..45e0126367578c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/test/UpdateTestResolverTest.java @@ -1,10 +1,13 @@ package com.linkedin.datahub.graphql.resolvers.test; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.datahub.graphql.generated.TestDefinitionInput; +import com.linkedin.datahub.graphql.generated.UpdateTestInput; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.metadata.Constants; @@ -19,19 +22,12 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import 
static org.testng.Assert.*; - - public class UpdateTestResolverTest { private static final String TEST_URN = "urn:li:test:test-id"; - private static final UpdateTestInput TEST_INPUT = new UpdateTestInput( - "test-name", - "test-category", - "test-description", - new TestDefinitionInput("{}") - ); + private static final UpdateTestInput TEST_INPUT = + new UpdateTestInput( + "test-name", "test-category", "test-description", new TestDefinitionInput("{}")); @Test public void testGetSuccess() throws Exception { @@ -48,16 +44,21 @@ public void testGetSuccess() throws Exception { resolver.get(mockEnv).get(); - ArgumentCaptor proposalCaptor = ArgumentCaptor.forClass(MetadataChangeProposal.class); + ArgumentCaptor proposalCaptor = + ArgumentCaptor.forClass(MetadataChangeProposal.class); Mockito.verify(mockClient, Mockito.times(1)) - .ingestProposal(proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); + .ingestProposal( + proposalCaptor.capture(), Mockito.any(Authentication.class), Mockito.eq(false)); MetadataChangeProposal resultProposal = proposalCaptor.getValue(); assertEquals(resultProposal.getEntityType(), Constants.TEST_ENTITY_NAME); assertEquals(resultProposal.getAspectName(), Constants.TEST_INFO_ASPECT_NAME); assertEquals(resultProposal.getChangeType(), ChangeType.UPSERT); assertEquals(resultProposal.getEntityUrn(), UrnUtils.getUrn(TEST_URN)); - TestInfo resultInfo = GenericRecordUtils.deserializeAspect(resultProposal.getAspect().getValue(), - resultProposal.getAspect().getContentType(), TestInfo.class); + TestInfo resultInfo = + GenericRecordUtils.deserializeAspect( + resultProposal.getAspect().getValue(), + resultProposal.getAspect().getContentType(), + TestInfo.class); assertEquals(resultInfo.getName(), "test-name"); assertEquals(resultInfo.getCategory(), "test-category"); assertEquals(resultInfo.getDescription(), "test-description"); @@ -79,18 +80,17 @@ public void testGetUnauthorized() throws Exception { 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetEntityClientException() throws Exception { // Update resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); UpdateTestResolver resolver = new UpdateTestResolver(mockClient); // Execute resolver diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java index 2164d4160634ce..742e162963ea38 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/user/CreateNativeUserResetTokenResolverTest.java @@ -1,5 +1,9 @@ package com.linkedin.datahub.graphql.resolvers.user; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.datahub.authentication.user.NativeUserService; import com.linkedin.datahub.graphql.QueryContext; @@ -8,11 +12,6 @@ import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static 
org.mockito.Mockito.*; -import static org.testng.Assert.*; - - public class CreateNativeUserResetTokenResolverTest { private static final String RESET_TOKEN = "resetToken"; @@ -47,7 +46,8 @@ public void testFailsNullUserUrn() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(null); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); } @@ -59,7 +59,8 @@ public void testPasses() throws Exception { CreateNativeUserResetTokenInput input = new CreateNativeUserResetTokenInput(USER_URN_STRING); when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); when(mockContext.getAuthentication()).thenReturn(_authentication); - when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())).thenReturn(RESET_TOKEN); + when(_nativeUserService.generateNativeUserPasswordResetToken(any(), any())) + .thenReturn(RESET_TOKEN); assertEquals(RESET_TOKEN, _resolver.get(_dataFetchingEnvironment).join().getResetToken()); } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java index 0957acf0cbbb30..15864dc3ac925f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/CreateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static 
org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; @@ -15,6 +18,8 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; @@ -23,34 +28,35 @@ import com.linkedin.metadata.query.filter.Filter; import com.linkedin.metadata.service.ViewService; import com.linkedin.view.DataHubViewDefinition; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class CreateViewResolverTest { - private static final CreateViewInput TEST_INPUT = new CreateViewInput( - DataHubViewType.PERSONAL, - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final CreateViewInput TEST_INPUT = + new CreateViewInput( + DataHubViewType.PERSONAL, + "test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + 
LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); @@ -71,37 +77,59 @@ public void testGetSuccess() throws Exception { assertEquals(view.getDescription(), TEST_INPUT.getDescription()); assertEquals(view.getViewType(), TEST_INPUT.getViewType()); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - assertEquals(view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); - assertEquals(view.getDefinition().getFilter().getOperator(), TEST_INPUT.getDefinition().getFilter().getOperator()); - assertEquals(view.getDefinition().getFilter().getFilters().size(), TEST_INPUT.getDefinition().getFilter().getFilters().size()); - - Mockito.verify(mockService, Mockito.times(1)).createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + assertEquals( + view.getDefinition().getEntityTypes(), TEST_INPUT.getDefinition().getEntityTypes()); + assertEquals( + view.getDefinition().getFilter().getOperator(), + TEST_INPUT.getDefinition().getFilter().getOperator()); + assertEquals( + view.getDefinition().getFilter().getFilters().size(), + TEST_INPUT.getDefinition().getFilter().getFilters().size()); + + Mockito.verify(mockService, Mockito.times(1)) + .createView( + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -118,22 +146,23 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).createView( - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .createView( + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); CreateViewResolver resolver = new CreateViewResolver(mockService); @@ -148,14 +177,15 @@ public void testGetViewServiceException() throws Exception { private ViewService initMockService() { ViewService service = Mockito.mock(ViewService.class); - Mockito.when(service.createView( - Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong() - )).thenReturn(TEST_VIEW_URN); + Mockito.when( + service.createView( + Mockito.eq(com.linkedin.view.DataHubViewType.PERSONAL), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong())) + .thenReturn(TEST_VIEW_URN); return service; } -} \ No 
newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java index afb4c16767f47c..357f2119187d63 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/DeleteViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; @@ -17,10 +20,6 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class DeleteViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); @@ -40,10 +39,8 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -60,10 +57,8 @@ public void testGetSuccessGlobalViewCanManager() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -79,13 +74,10 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() throws Exception { 
assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } - @Test public void testGetSuccessPersonalViewIsCreator() throws Exception { ViewService mockService = initViewService(DataHubViewType.PERSONAL); @@ -99,10 +91,8 @@ public void testGetSuccessPersonalViewIsCreator() throws Exception { assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test @@ -118,19 +108,17 @@ public void testGetFailurePersonalViewIsNotCreator() throws Exception { assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).deleteView( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(0)) + .deleteView(Mockito.eq(TEST_URN), Mockito.any(Authentication.class)); } @Test public void testGetViewServiceException() throws Exception { // Create resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).deleteView( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .deleteView(Mockito.any(), Mockito.any(Authentication.class)); DeleteViewResolver resolver = new DeleteViewResolver(mockService); @@ -146,19 +134,21 @@ public void testGetViewServiceException() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - 
.setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java index 9a25c9eb1d25c6..8c30c17201bc65 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListGlobalViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -28,53 +31,53 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; 
-import static org.testng.Assert.*; - - public class ListGlobalViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput( - 0, 20, "" - ); + private static final ListGlobalViewsInput TEST_INPUT = new ListGlobalViewsInput(0, 20, ""); @Test public void testGetSuccessInput() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new 
SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListGlobalViewsResolver resolver = new ListGlobalViewsResolver(mockClient); @@ -107,7 +110,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -121,7 +125,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -139,4 +145,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java index 4c435841448251..85e20cd656fcd3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ListMyViewsResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import 
com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; @@ -27,63 +30,65 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class ListMyViewsResolverTest { private static final Urn TEST_URN = Urn.createFromTuple("dataHubView", "test-id"); private static final Urn TEST_USER = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final ListMyViewsInput TEST_INPUT_1 = new ListMyViewsInput( - 0, 20, "", DataHubViewType.GLOBAL - ); + private static final ListMyViewsInput TEST_INPUT_1 = + new ListMyViewsInput(0, 20, "", DataHubViewType.GLOBAL); - private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput( - 0, 20, "", null - ); + private static final ListMyViewsInput TEST_INPUT_2 = new ListMyViewsInput(0, 20, "", null); @Test public void testGetSuccessInput1() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false), + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false), new Criterion() - 
.setField("type.keyword") - .setValue(DataHubViewType.GLOBAL.toString()) - .setValues(new StringArray( - ImmutableList.of(DataHubViewType.GLOBAL.toString()))) - .setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("type.keyword") + .setValue(DataHubViewType.GLOBAL.toString()) + .setValues( + new StringArray( + ImmutableList.of( + DataHubViewType.GLOBAL + .toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -106,35 +111,41 @@ public void testGetSuccessInput2() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.when(mockClient.search( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(""), - Mockito.eq( + Mockito.when( + mockClient.search( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(""), + Mockito.eq( new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( + .setAnd( + new CriterionArray( + ImmutableList.of( new Criterion() - .setField("createdBy.keyword") - .setValue(TEST_USER.toString()) - .setValues(new StringArray(ImmutableList.of(TEST_USER.toString()))) - 
.setCondition(Condition.EQUAL) - .setNegated(false) - ))) - ))) - ), - Mockito.any(), - Mockito.eq(0), - Mockito.eq(20), - Mockito.any(Authentication.class), - Mockito.eq(new SearchFlags().setFulltext(true)) - )).thenReturn( - new SearchResult() - .setFrom(0) - .setPageSize(1) - .setNumEntities(1) - .setEntities(new SearchEntityArray(ImmutableSet.of(new SearchEntity().setEntity(TEST_URN)))) - ); + .setField("createdBy.keyword") + .setValue(TEST_USER.toString()) + .setValues( + new StringArray( + ImmutableList.of( + TEST_USER.toString()))) + .setCondition(Condition.EQUAL) + .setNegated(false)))))))), + Mockito.any(), + Mockito.eq(0), + Mockito.eq(20), + Mockito.any(Authentication.class), + Mockito.eq(new SearchFlags().setFulltext(true)))) + .thenReturn( + new SearchResult() + .setFrom(0) + .setPageSize(1) + .setNumEntities(1) + .setEntities( + new SearchEntityArray( + ImmutableSet.of(new SearchEntity().setEntity(TEST_URN))))); ListMyViewsResolver resolver = new ListMyViewsResolver(mockClient); @@ -165,7 +176,8 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).search( + Mockito.verify(mockClient, Mockito.times(0)) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -179,7 +191,9 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).search( + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .search( Mockito.any(), Mockito.eq(""), Mockito.anyMap(), @@ -197,4 +211,4 @@ public void testGetEntityClientException() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); } -} \ No newline at end of file +} 
diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java index b4895982ae7801..1917e55705828d 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/UpdateViewResolverTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -38,30 +41,33 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - - public class UpdateViewResolverTest { private static final Urn TEST_URN = UrnUtils.getUrn("urn:li:dataHubView:test-id"); private static final Urn TEST_AUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:auth"); private static final Urn TEST_UNAUTHORIZED_USER = UrnUtils.getUrn("urn:li:corpuser:no-auth"); - private static final UpdateViewInput TEST_INPUT = new UpdateViewInput( - "test-name", - "test-description", - new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.EQUAL), - new FacetFilterInput("test2", null, ImmutableList.of("value1", "value2"), true, FilterOperator.IN) - ) - ) - ) - ); + private static final UpdateViewInput TEST_INPUT = + new UpdateViewInput( + "test-name", + "test-description", + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, 
EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.EQUAL), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value1", "value2"), + true, + FilterOperator.IN))))); @Test public void testGetSuccessGlobalViewIsCreator() throws Exception { @@ -81,33 +87,50 @@ public void testGetSuccessGlobalViewIsCreator() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test @@ -128,46 +151,65 @@ public void testGetSuccessGlobalViewManageGlobalViews() throws Exception { assertEquals(view.getViewType(), com.linkedin.datahub.graphql.generated.DataHubViewType.GLOBAL); assertEquals(view.getType(), EntityType.DATAHUB_VIEW); - Mockito.verify(mockService, Mockito.times(1)).updateView( - Mockito.eq(TEST_URN), - Mockito.eq(TEST_INPUT.getName()), - Mockito.eq(TEST_INPUT.getDescription()), - Mockito.eq( - new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion() - .setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setCondition(Condition.EQUAL) - .setField("test1.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(false), - new Criterion() - .setCondition(Condition.IN) - .setField("test2.keyword") - .setValue("value1") // Unfortunate --- For backwards compat. 
- .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setNegated(true) - ))) - )) - ) - )), Mockito.any(Authentication.class), Mockito.anyLong()); + Mockito.verify(mockService, Mockito.times(1)) + .updateView( + Mockito.eq(TEST_URN), + Mockito.eq(TEST_INPUT.getName()), + Mockito.eq(TEST_INPUT.getDescription()), + Mockito.eq( + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setCondition(Condition.EQUAL) + .setField("test1.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(false), + new Criterion() + .setCondition(Condition.IN) + .setField("test2.keyword") + .setValue( + "value1") // Unfortunate --- For + // backwards compat. 
+ .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setNegated(true))))))))), + Mockito.any(Authentication.class), + Mockito.anyLong()); } @Test public void testGetViewServiceException() throws Exception { // Update resolver ViewService mockService = Mockito.mock(ViewService.class); - Mockito.doThrow(RuntimeException.class).when(mockService).updateView( - Mockito.any(Urn.class), - Mockito.any(), - Mockito.any(), - Mockito.any(), - Mockito.any(Authentication.class), - Mockito.anyLong()); + Mockito.doThrow(RuntimeException.class) + .when(mockService) + .updateView( + Mockito.any(Urn.class), + Mockito.any(), + Mockito.any(), + Mockito.any(), + Mockito.any(Authentication.class), + Mockito.anyLong()); UpdateViewResolver resolver = new UpdateViewResolver(mockService); @@ -196,43 +238,46 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockClient, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(Authentication.class)); + Mockito.verify(mockClient, Mockito.times(0)) + .ingestProposal(Mockito.any(), Mockito.any(Authentication.class)); } private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName(TEST_INPUT.getName()) - .setDescription(TEST_INPUT.getDescription()) - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - EntityResponse testEntityResponse = new EntityResponse() - .setUrn(TEST_URN) - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - 
Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect() - .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) - .setType(AspectType.VERSIONED) - .setValue(new Aspect(testInfo.data())) - ))); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName(TEST_INPUT.getName()) + .setDescription(TEST_INPUT.getDescription()) + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + EntityResponse testEntityResponse = + new EntityResponse() + .setUrn(TEST_URN) + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, + new EnvelopedAspect() + .setName(Constants.DATAHUB_VIEW_INFO_ASPECT_NAME) + .setType(AspectType.VERSIONED) + .setValue(new Aspect(testInfo.data()))))); + + Mockito.when(mockService.getViewInfo(Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); - Mockito.when(mockService.getViewEntityResponse( - Mockito.eq(TEST_URN), - Mockito.any(Authentication.class))) + Mockito.when( + mockService.getViewEntityResponse( + Mockito.eq(TEST_URN), Mockito.any(Authentication.class))) .thenReturn(testEntityResponse); return mockService; } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java index 9578ff201ca194..3ad3f0786e9873 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/view/ViewUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.resolvers.view; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.linkedin.common.AuditStamp; @@ -25,12 +28,8 @@ import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; import graphql.Assert; -import org.testng.annotations.Test; import org.mockito.Mockito; - -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - +import org.testng.annotations.Test; public class ViewUtilsTest { @@ -39,10 +38,10 @@ public class ViewUtilsTest { private static final Urn TEST_VIEW_URN = UrnUtils.getUrn("urn:li:dataHubView:test"); - @Test public static void testCanCreatePersonalViewAllowed() { - boolean res = ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); + boolean res = + ViewUtils.canCreateView(DataHubViewType.PERSONAL, Mockito.mock(QueryContext.class)); Assert.assertTrue(res); } @@ -67,10 +66,8 @@ public void testCanUpdateViewSuccessGlobalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -80,10 +77,8 @@ public void testCanUpdateViewSuccessGlobalViewCanManageGlobalViews() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), 
Mockito.any(Authentication.class)); } @Test @@ -93,10 +88,8 @@ public void testGetFailureGlobalViewIsNotCreatorOrManager() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -106,10 +99,8 @@ public void testGetSuccessPersonalViewIsCreator() { assertTrue(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test @@ -119,50 +110,69 @@ public void testGetFailurePersonalViewIsNotCreator() { assertFalse(ViewUtils.canUpdateView(mockService, TEST_VIEW_URN, mockContext)); - Mockito.verify(mockService, Mockito.times(1)).getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class) - ); + Mockito.verify(mockService, Mockito.times(1)) + .getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class)); } @Test public void testMapDefinition() throws Exception { - DataHubViewDefinitionInput input = new DataHubViewDefinitionInput( - ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), - new DataHubViewFilterInput( - LogicalOperator.AND, - ImmutableList.of( - new FacetFilterInput("test1", null, ImmutableList.of("value1", "value2"), false, FilterOperator.IN), - new FacetFilterInput("test2", null, ImmutableList.of("value3", "value4"), true, FilterOperator.CONTAIN) - ) - ) - ); - - DataHubViewDefinition expectedResult = new DataHubViewDefinition() - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) - .setFilter(new Filter() - 
.setOr(new ConjunctiveCriterionArray( - ImmutableList.of(new ConjunctiveCriterion() - .setAnd( - new CriterionArray(ImmutableList.of( - new Criterion() - .setNegated(false) - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setValue("value1") // Disgraceful - .setField("test1.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.IN), - new Criterion() - .setNegated(true) - .setValues(new StringArray(ImmutableList.of("value3", "value4"))) - .setValue("value3") // Disgraceful - .setField("test2.keyword") // Consider whether we should NOT go through the keyword mapping. - .setCondition(Condition.CONTAIN) - )) - ) - ) - )) - ); + DataHubViewDefinitionInput input = + new DataHubViewDefinitionInput( + ImmutableList.of(EntityType.DATASET, EntityType.DASHBOARD), + new DataHubViewFilterInput( + LogicalOperator.AND, + ImmutableList.of( + new FacetFilterInput( + "test1", + null, + ImmutableList.of("value1", "value2"), + false, + FilterOperator.IN), + new FacetFilterInput( + "test2", + null, + ImmutableList.of("value3", "value4"), + true, + FilterOperator.CONTAIN)))); + + DataHubViewDefinition expectedResult = + new DataHubViewDefinition() + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME))) + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setNegated(false) + .setValues( + new StringArray( + ImmutableList.of("value1", "value2"))) + .setValue("value1") // Disgraceful + .setField( + "test1.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. 
+ .setCondition(Condition.IN), + new Criterion() + .setNegated(true) + .setValues( + new StringArray( + ImmutableList.of("value3", "value4"))) + .setValue("value3") // Disgraceful + .setField( + "test2.keyword") // Consider whether we + // should NOT go through + // the keyword mapping. + .setCondition(Condition.CONTAIN)))))))); assertEquals(ViewUtils.mapDefinition(input), expectedResult); } @@ -170,17 +180,20 @@ public void testMapDefinition() throws Exception { private static ViewService initViewService(DataHubViewType viewType) { ViewService mockService = Mockito.mock(ViewService.class); - DataHubViewInfo testInfo = new DataHubViewInfo() - .setType(viewType) - .setName("test-name") - .setDescription("test-description") - .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) - .setDefinition(new DataHubViewDefinition().setEntityTypes(new StringArray()).setFilter(new Filter())); - - Mockito.when(mockService.getViewInfo( - Mockito.eq(TEST_VIEW_URN), - Mockito.any(Authentication.class))) + DataHubViewInfo testInfo = + new DataHubViewInfo() + .setType(viewType) + .setName("test-name") + .setDescription("test-description") + .setCreated(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_AUTHORIZED_USER).setTime(0L)) + .setDefinition( + new DataHubViewDefinition() + .setEntityTypes(new StringArray()) + .setFilter(new Filter())); + + Mockito.when( + mockService.getViewInfo(Mockito.eq(TEST_VIEW_URN), Mockito.any(Authentication.class))) .thenReturn(testInfo); return mockService; diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java index c4465c7d3cb659..c975c7ebb0507c 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/assertion/AssertionTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.assertion; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -28,27 +30,25 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class AssertionTypeTest { private static final String TEST_ASSERTION_URN = "urn:li:assertion:guid-1"; - private static final AssertionKey TEST_ASSERTION_KEY = new AssertionKey() - .setAssertionId("guid-1"); - private static final AssertionInfo TEST_ASSERTION_INFO = new AssertionInfo() - .setType(AssertionType.DATASET) - .setDatasetAssertion(null, SetMode.IGNORE_NULL) - .setCustomProperties(new StringMap()); - private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("snowflake")) - .setInstance(null, SetMode.IGNORE_NULL); + private static final AssertionKey TEST_ASSERTION_KEY = + new AssertionKey().setAssertionId("guid-1"); + private static final AssertionInfo TEST_ASSERTION_INFO = + new AssertionInfo() + .setType(AssertionType.DATASET) + .setDatasetAssertion(null, SetMode.IGNORE_NULL) + .setCustomProperties(new StringMap()); + private static final DataPlatformInstance TEST_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance() + .setPlatform(new DataPlatformUrn("snowflake")) + .setInstance(null, SetMode.IGNORE_NULL); private static final String TEST_ASSERTION_URN_2 = "urn:li:assertion:guid-2"; - @Test public void testBatchLoad() throws Exception { @@ -60,41 +60,43 @@ public void testBatchLoad() throws Exception { Map assertion1Aspects = new HashMap<>(); 
assertion1Aspects.put( Constants.ASSERTION_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_KEY.data()))); assertion1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_DATA_PLATFORM_INSTANCE.data()))); assertion1Aspects.put( Constants.ASSERTION_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - assertionUrn1, - new EntityResponse() - .setEntityName(Constants.ASSERTION_ENTITY_NAME) - .setUrn(assertionUrn1) - .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); - - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_ASSERTION_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(assertionUrn1, assertionUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + assertionUrn1, + new EntityResponse() + .setEntityName(Constants.ASSERTION_ENTITY_NAME) + .setUrn(assertionUrn1) + .setAspects(new EnvelopedAspectMap(assertion1Aspects)))); + + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(client); QueryContext mockContext = 
Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.ASSERTION_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.ASSERTION_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(assertionUrn1, assertionUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.assertion.AssertionType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -112,17 +114,21 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.assertion.AssertionType type = new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.assertion.AssertionType type = + new com.linkedin.datahub.graphql.types.assertion.AssertionType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); 
Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_ASSERTION_URN, TEST_ASSERTION_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java index 3ff4e43ca112c4..1e2acd0db455cd 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/container/ContainerTypeTest.java @@ -1,6 +1,7 @@ - package com.linkedin.datahub.graphql.types.container; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -26,12 +27,12 @@ import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; +import com.linkedin.container.ContainerProperties; import com.linkedin.container.EditableContainerProperties; import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.Container; import com.linkedin.datahub.graphql.generated.EntityType; -import com.linkedin.container.ContainerProperties; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; @@ -46,46 +47,55 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class ContainerTypeTest { private static final 
String TEST_CONTAINER_1_URN = "urn:li:container:guid-1"; - private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey() - .setGuid("guid-1"); - private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = new ContainerProperties() - .setDescription("test description") - .setName("Test Container"); - private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = new EditableContainerProperties() - .setDescription("test editable description"); - private static final Ownership TEST_CONTAINER_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); - private static final Status TEST_CONTAINER_1_STATUS = new Status() - .setRemoved(false); - private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = new SubTypes() - .setTypeNames(new StringArray(ImmutableList.of("Database"))); - private static final GlobalTags TEST_CONTAINER_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); - private static final com.linkedin.container.Container 
TEST_CONTAINER_1_CONTAINER = new com.linkedin.container.Container() - .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); + private static final ContainerKey TEST_CONTAINER_1_KEY = new ContainerKey().setGuid("guid-1"); + private static final ContainerProperties TEST_CONTAINER_1_PROPERTIES = + new ContainerProperties().setDescription("test description").setName("Test Container"); + private static final EditableContainerProperties TEST_CONTAINER_1_EDITABLE_PROPERTIES = + new EditableContainerProperties().setDescription("test editable description"); + private static final Ownership TEST_CONTAINER_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_CONTAINER_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DataPlatformInstance TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(Urn.createFromTuple("dataPlatform", "mysql")); + private static final Status TEST_CONTAINER_1_STATUS = new Status().setRemoved(false); + private static final SubTypes TEST_CONTAINER_1_SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("Database"))); + private static final GlobalTags TEST_CONTAINER_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_CONTAINER_1_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new 
GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final com.linkedin.container.Container TEST_CONTAINER_1_CONTAINER = + new com.linkedin.container.Container() + .setContainer(Urn.createFromTuple(Constants.CONTAINER_ENTITY_NAME, "parent-container")); private static final String TEST_CONTAINER_2_URN = "urn:li:container:guid-2"; @@ -100,73 +110,65 @@ public void testBatchLoad() throws Exception { Map container1Aspects = new HashMap<>(); container1Aspects.put( Constants.CONTAINER_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_KEY.data()))); container1Aspects.put( Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_DATA_PLATFORM_INSTANCE.data()))); container1Aspects.put( Constants.CONTAINER_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_PROPERTIES.data()))); container1Aspects.put( Constants.CONTAINER_EDITABLE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_EDITABLE_PROPERTIES.data()))); container1Aspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_OWNERSHIP.data()))); container1Aspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_INSTITUTIONAL_MEMORY.data()))); container1Aspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new 
EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_SUB_TYPES.data()))); container1Aspects.put( Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_STATUS.data()))); container1Aspects.put( Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_TAGS.data()))); container1Aspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_GLOSSARY_TERMS.data()))); container1Aspects.put( Constants.CONTAINER_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - containerUrn1, - new EntityResponse() - .setEntityName(Constants.CONTAINER_ENTITY_NAME) - .setUrn(containerUrn1) - .setAspects(new EnvelopedAspectMap(container1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_CONTAINER_1_CONTAINER.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(containerUrn1, containerUrn2))), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + containerUrn1, + new EntityResponse() + .setEntityName(Constants.CONTAINER_ENTITY_NAME) + .setUrn(containerUrn1) + .setAspects(new EnvelopedAspectMap(container1Aspects)))); ContainerType type = new ContainerType(client); 
QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.CONTAINER_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)), - Mockito.eq(ContainerType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.CONTAINER_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(containerUrn1, containerUrn2)), + Mockito.eq(ContainerType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -177,8 +179,12 @@ public void testBatchLoad() throws Exception { assertEquals(container1.getProperties().getDescription(), "test description"); assertEquals(container1.getProperties().getName(), "Test Container"); assertEquals(container1.getInstitutionalMemory().getElements().size(), 1); - assertEquals(container1.getSubTypes().getTypeNames().get(0), TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0)); - assertEquals(container1.getEditableProperties().getDescription(), TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription()); + assertEquals( + container1.getSubTypes().getTypeNames().get(0), + TEST_CONTAINER_1_SUB_TYPES.getTypeNames().get(0)); + assertEquals( + container1.getEditableProperties().getDescription(), + TEST_CONTAINER_1_EDITABLE_PROPERTIES.getDescription()); assertEquals( container1.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_CONTAINER_1_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); @@ -186,8 +192,7 @@ public void testBatchLoad() throws Exception { container1.getTags().getTags().get(0).getTag().getUrn(), 
TEST_CONTAINER_1_TAGS.getTags().get(0).getTag().toString()); assertEquals( - container1.getContainer().getUrn(), - TEST_CONTAINER_1_CONTAINER.getContainer().toString()); + container1.getContainer().getUrn(), TEST_CONTAINER_1_CONTAINER.getContainer().toString()); // Assert second element is null. assertNull(result.get(1)); @@ -196,17 +201,21 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad(ImmutableList.of(TEST_CONTAINER_1_URN, TEST_CONTAINER_2_URN), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java index 9b6e11fd0b3a45..667d943b1095d0 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataplatforminstance/DataPlatformInstanceTest.java @@ -1,22 +1,24 @@ 
package com.linkedin.datahub.graphql.types.dataplatforminstance; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; +import com.linkedin.common.AuditStamp; import com.linkedin.common.Deprecation; -import com.linkedin.common.Ownership; -import com.linkedin.common.OwnerArray; -import com.linkedin.common.Owner; -import com.linkedin.common.OwnershipType; +import com.linkedin.common.GlobalTags; import com.linkedin.common.InstitutionalMemory; import com.linkedin.common.InstitutionalMemoryMetadata; import com.linkedin.common.InstitutionalMemoryMetadataArray; -import com.linkedin.common.AuditStamp; -import com.linkedin.common.GlobalTags; +import com.linkedin.common.Owner; +import com.linkedin.common.OwnerArray; +import com.linkedin.common.Ownership; +import com.linkedin.common.OwnershipType; +import com.linkedin.common.Status; import com.linkedin.common.TagAssociation; import com.linkedin.common.TagAssociationArray; -import com.linkedin.common.Status; import com.linkedin.common.url.Url; import com.linkedin.common.urn.TagUrn; import com.linkedin.common.urn.Urn; @@ -33,185 +35,181 @@ import com.linkedin.metadata.key.DataPlatformInstanceKey; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; -import org.mockito.Mockito; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; - -import static org.testng.Assert.*; +import org.mockito.Mockito; +import org.testng.annotations.Test; public class DataPlatformInstanceTest { - private static final Urn TEST_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); - - private static final String TEST_DATAPLATFORMINSTANCE_1_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; - - private static final 
DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY - = new DataPlatformInstanceKey() - .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) - .setInstance("I1"); - - private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES - = new DataPlatformInstanceProperties() - .setDescription("test description") - .setName("Test Data Platform Instance"); - - private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = new Deprecation() - .setDeprecated(true) - .setActor(TEST_ACTOR_URN) - .setNote("legacy"); - - private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(TEST_ACTOR_URN)))); - - private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); - - private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - - private static final Status TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status() - .setRemoved(false); - - private static final String TEST_DATAPLATFORMINSTANCE_2_URN = "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; - - @Test - public void testBatchLoad() throws Exception { - EntityClient client = Mockito.mock(EntityClient.class); - - Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); - Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); - - Map dataPlatformInstance1Aspects = new HashMap<>(); - dataPlatformInstance1Aspects.put( - 
Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.DEPRECATION_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data())) - ); - dataPlatformInstance1Aspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), - Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - dataPlatformInstance1Urn, - new EntityResponse() - .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) - .setUrn(dataPlatformInstance1Urn) - .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); - - DataPlatformInstanceType type = new DataPlatformInstanceType(client); - - QueryContext mockContext = Mockito.mock(QueryContext.class); - Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - 
Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); - List> result = type.batchLoad( - ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), mockContext); - - // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( + private static final Urn TEST_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "test"); + + private static final String TEST_DATAPLATFORMINSTANCE_1_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I1)"; + + private static final DataPlatformInstanceKey TEST_DATAPLATFORMINSTANCE_1_KEY = + new DataPlatformInstanceKey() + .setPlatform(Urn.createFromTuple(Constants.DATA_PLATFORM_ENTITY_NAME, "P")) + .setInstance("I1"); + + private static final DataPlatformInstanceProperties TEST_DATAPLATFORMINSTANCE_1_PROPERTIES = + new DataPlatformInstanceProperties() + .setDescription("test description") + .setName("Test Data Platform Instance"); + + private static final Deprecation TEST_DATAPLATFORMINSTANCE_1_DEPRECATION = + new Deprecation().setDeprecated(true).setActor(TEST_ACTOR_URN).setNote("legacy"); + + private static final Ownership TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner().setType(OwnershipType.DATAOWNER).setOwner(TEST_ACTOR_URN)))); + + private static final InstitutionalMemory TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp(new AuditStamp().setTime(0L).setActor(TEST_ACTOR_URN))))); + + private static final GlobalTags TEST_DATAPLATFORMINSTANCE_1_TAGS = + new GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + + private static final Status 
TEST_DATAPLATFORMINSTANCE_1_STATUS = new Status().setRemoved(false); + + private static final String TEST_DATAPLATFORMINSTANCE_2_URN = + "urn:li:dataPlatformInstance:(urn:li:dataPlatform:P,I2)"; + + @Test + public void testBatchLoad() throws Exception { + EntityClient client = Mockito.mock(EntityClient.class); + + Urn dataPlatformInstance1Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_1_URN); + Urn dataPlatformInstance2Urn = Urn.createFromString(TEST_DATAPLATFORMINSTANCE_2_URN); + + Map dataPlatformInstance1Aspects = new HashMap<>(); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_KEY_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_KEY.data()))); + dataPlatformInstance1Aspects.put( + Constants.DATA_PLATFORM_INSTANCE_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.data()))); + dataPlatformInstance1Aspects.put( + Constants.DEPRECATION_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.data()))); + dataPlatformInstance1Aspects.put( + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_OWNERSHIP.data()))); + dataPlatformInstance1Aspects.put( + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_INSTITUTIONAL_MEMORY.data()))); + dataPlatformInstance1Aspects.put( + Constants.GLOBAL_TAGS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_TAGS.data()))); + dataPlatformInstance1Aspects.put( + Constants.STATUS_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_DATAPLATFORMINSTANCE_1_STATUS.data()))); + Mockito.when( + client.batchGetV2( Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq( + new HashSet<>( + 
ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn))), Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); - - assertEquals(result.size(), 2); - - DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); - assertEquals( - dataPlatformInstance1.getUrn(), - TEST_DATAPLATFORMINSTANCE_1_URN - ); - assertEquals( - dataPlatformInstance1.getType(), - EntityType.DATA_PLATFORM_INSTANCE - ); - assertEquals( - dataPlatformInstance1.getProperties().getDescription(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription() - ); - assertEquals( - dataPlatformInstance1.getProperties().getName(), - TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getDeprecated(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getNote(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote() - ); - assertEquals( - dataPlatformInstance1.getDeprecation().getActor(), - TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString() - ); - assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); - assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); - assertEquals( - dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), - TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString() - ); - assertEquals( - dataPlatformInstance1.getStatus().getRemoved(), - TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue() - ); - - // Assert second element is null. 
- assertNull(result.get(1)); - } - - @Test - public void testBatchLoadClientException() throws Exception { - EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type - = new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType(mockClient); - - // Execute Batch load - QueryContext context = Mockito.mock(QueryContext.class); - Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of( - TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), context)); - } + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + dataPlatformInstance1Urn, + new EntityResponse() + .setEntityName(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME) + .setUrn(dataPlatformInstance1Urn) + .setAspects(new EnvelopedAspectMap(dataPlatformInstance1Aspects)))); + + DataPlatformInstanceType type = new DataPlatformInstanceType(client); + + QueryContext mockContext = Mockito.mock(QueryContext.class); + Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + Mockito.when(mockContext.getActorUrn()).thenReturn(TEST_ACTOR_URN.toString()); + List> result = + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + mockContext); + + // Verify response + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATA_PLATFORM_INSTANCE_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(dataPlatformInstance1Urn, dataPlatformInstance2Urn)), + Mockito.eq(DataPlatformInstanceType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); + + assertEquals(result.size(), 2); + + 
DataPlatformInstance dataPlatformInstance1 = result.get(0).getData(); + assertEquals(dataPlatformInstance1.getUrn(), TEST_DATAPLATFORMINSTANCE_1_URN); + assertEquals(dataPlatformInstance1.getType(), EntityType.DATA_PLATFORM_INSTANCE); + assertEquals( + dataPlatformInstance1.getProperties().getDescription(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getDescription()); + assertEquals( + dataPlatformInstance1.getProperties().getName(), + TEST_DATAPLATFORMINSTANCE_1_PROPERTIES.getName()); + assertEquals( + dataPlatformInstance1.getDeprecation().getDeprecated(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.isDeprecated().booleanValue()); + assertEquals( + dataPlatformInstance1.getDeprecation().getNote(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getNote()); + assertEquals( + dataPlatformInstance1.getDeprecation().getActor(), + TEST_DATAPLATFORMINSTANCE_1_DEPRECATION.getActor().toString()); + assertEquals(dataPlatformInstance1.getOwnership().getOwners().size(), 1); + assertEquals(dataPlatformInstance1.getInstitutionalMemory().getElements().size(), 1); + assertEquals( + dataPlatformInstance1.getTags().getTags().get(0).getTag().getUrn(), + TEST_DATAPLATFORMINSTANCE_1_TAGS.getTags().get(0).getTag().toString()); + assertEquals( + dataPlatformInstance1.getStatus().getRemoved(), + TEST_DATAPLATFORMINSTANCE_1_STATUS.isRemoved().booleanValue()); + + // Assert second element is null. 
+ assertNull(result.get(1)); + } + + @Test + public void testBatchLoadClientException() throws Exception { + EntityClient mockClient = Mockito.mock(EntityClient.class); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType type = + new com.linkedin.datahub.graphql.types.dataplatforminstance.DataPlatformInstanceType( + mockClient); + + // Execute Batch load + QueryContext context = Mockito.mock(QueryContext.class); + Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_DATAPLATFORMINSTANCE_1_URN, TEST_DATAPLATFORMINSTANCE_2_URN), + context)); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java index 3d22f1c429fd60..b28dd287e3fe4a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapperTest.java @@ -2,152 +2,185 @@ import com.linkedin.common.TimeStamp; import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.AuditStamp; import com.linkedin.datahub.graphql.generated.Dataset; import com.linkedin.datahub.graphql.generated.DatasetProperties; import com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.metadata.Constants; -import org.testng.Assert; -import org.testng.annotations.Test; - import java.util.HashMap; import java.util.Map; +import org.testng.Assert; +import 
org.testng.annotations.Test; public class DatasetMapperTest { - private static final Urn TEST_DATASET_URN = Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); - private static final Urn TEST_CREATED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); - private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); - - @Test - public void testDatasetPropertiesMapperWithCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - input.setQualifiedName("Test QualifiedName"); - - final TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - final TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setQualifiedName("Test QualifiedName"); - expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); - expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); - 
expectedDatasetProperties.setLastModified(20L); - expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - Assert.assertEquals(actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(null); - expectedDatasetProperties.setCreated(null); - 
expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } - - @Test - public void testDatasetPropertiesMapperWithoutTimestampActors() { - final com.linkedin.dataset.DatasetProperties input = new com.linkedin.dataset.DatasetProperties(); - input.setName("Test"); - - TimeStamp createdTimestamp = new TimeStamp(); - createdTimestamp.setTime(10L); - input.setCreated(createdTimestamp); - - TimeStamp lastModifiedTimestamp = new TimeStamp(); - lastModifiedTimestamp.setTime(20L); - input.setLastModified(lastModifiedTimestamp); - - final Map dataSetPropertiesAspects = new HashMap<>(); - dataSetPropertiesAspects.put( - Constants.DATASET_PROPERTIES_ASPECT_NAME, - new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); - final EntityResponse response = new EntityResponse() - .setEntityName(Constants.DATASET_ENTITY_NAME) - .setUrn(TEST_DATASET_URN) - .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); - final Dataset actual = DatasetMapper.map(response); - - - final Dataset expected = new Dataset(); - expected.setUrn(TEST_DATASET_URN.toString()); - final DatasetProperties expectedDatasetProperties = new DatasetProperties(); - expectedDatasetProperties.setName("Test"); - expectedDatasetProperties.setLastModifiedActor(null); - expectedDatasetProperties.setCreatedActor(null); - expectedDatasetProperties.setLastModified(20L); - 
expectedDatasetProperties.setCreated(10L); - expected.setProperties(expectedDatasetProperties); - - Assert.assertEquals(actual.getUrn(), expected.getUrn()); - Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); - - Assert.assertEquals(actual.getProperties().getLastModified(), expected.getProperties().getLastModified()); - Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); - - Assert.assertEquals(actual.getProperties().getLastModifiedActor(), expected.getProperties().getLastModifiedActor()); - Assert.assertEquals(actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); - - } + private static final Urn TEST_DATASET_URN = + Urn.createFromTuple(Constants.DATASET_ENTITY_NAME, "test"); + private static final Urn TEST_CREATED_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "created"); + private static final Urn TEST_LAST_MODIFIED_ACTOR_URN = + Urn.createFromTuple(Constants.CORP_USER_ENTITY_NAME, "lastmodified"); + + @Test + public void testDatasetPropertiesMapperWithCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + input.setQualifiedName("Test QualifiedName"); + + final TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setActor(TEST_CREATED_ACTOR_URN); + createdTimestamp.setTime(10L); + input.setCreated(createdTimestamp); + + final TimeStamp lastModifiedTimestamp = new TimeStamp(); + lastModifiedTimestamp.setActor(TEST_LAST_MODIFIED_ACTOR_URN); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + 
.setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setQualifiedName("Test QualifiedName"); + expectedDatasetProperties.setLastModifiedActor(TEST_LAST_MODIFIED_ACTOR_URN.toString()); + expectedDatasetProperties.setCreatedActor(TEST_CREATED_ACTOR_URN.toString()); + expectedDatasetProperties.setLastModified( + new AuditStamp(20L, TEST_LAST_MODIFIED_ACTOR_URN.toString())); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + Assert.assertEquals( + actual.getProperties().getQualifiedName(), expected.getProperties().getQualifiedName()); + + Assert.assertEquals( + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutCreatedAndLastModified() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + final Map dataSetPropertiesAspects = + new 
HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(new AuditStamp(0L, null)); + expectedDatasetProperties.setCreated(null); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } + + @Test + public void testDatasetPropertiesMapperWithoutTimestampActors() { + final com.linkedin.dataset.DatasetProperties input = + new com.linkedin.dataset.DatasetProperties(); + input.setName("Test"); + + TimeStamp createdTimestamp = new TimeStamp(); + createdTimestamp.setTime(10L); + 
input.setCreated(createdTimestamp); + + TimeStamp lastModifiedTimestamp = new TimeStamp(); + lastModifiedTimestamp.setTime(20L); + input.setLastModified(lastModifiedTimestamp); + + final Map dataSetPropertiesAspects = + new HashMap<>(); + dataSetPropertiesAspects.put( + Constants.DATASET_PROPERTIES_ASPECT_NAME, + new com.linkedin.entity.EnvelopedAspect().setValue(new Aspect(input.data()))); + final EntityResponse response = + new EntityResponse() + .setEntityName(Constants.DATASET_ENTITY_NAME) + .setUrn(TEST_DATASET_URN) + .setAspects(new EnvelopedAspectMap(dataSetPropertiesAspects)); + final Dataset actual = DatasetMapper.map(response); + + final Dataset expected = new Dataset(); + expected.setUrn(TEST_DATASET_URN.toString()); + final DatasetProperties expectedDatasetProperties = new DatasetProperties(); + expectedDatasetProperties.setName("Test"); + expectedDatasetProperties.setLastModifiedActor(null); + expectedDatasetProperties.setCreatedActor(null); + expectedDatasetProperties.setLastModified(new AuditStamp(20L, null)); + expectedDatasetProperties.setCreated(10L); + expected.setProperties(expectedDatasetProperties); + + Assert.assertEquals(actual.getUrn(), expected.getUrn()); + Assert.assertEquals(actual.getProperties().getName(), expected.getProperties().getName()); + + Assert.assertEquals( + actual.getProperties().getLastModified().getTime(), + expected.getProperties().getLastModified().getTime()); + Assert.assertEquals( + actual.getProperties().getLastModified().getActor(), + expected.getProperties().getLastModified().getActor()); + Assert.assertEquals(actual.getProperties().getCreated(), expected.getProperties().getCreated()); + + Assert.assertEquals( + actual.getProperties().getLastModifiedActor(), + expected.getProperties().getLastModifiedActor()); + Assert.assertEquals( + actual.getProperties().getCreatedActor(), expected.getProperties().getCreatedActor()); + } } diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java index 78cdaa0a276da0..612136d1f9164a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetProfileMapperTest.java @@ -19,90 +19,128 @@ public void testMapperFullProfile() { input.setRowCount(10L); input.setColumnCount(45L); input.setSizeInBytes(15L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setMax("1") - .setMean("2") - .setStdev("3") - .setMedian("4") - .setMin("5") - .setNullCount(20L) - .setNullProportion(20.5f) - .setUniqueCount(30L) - .setUniqueProportion(30.5f) - .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setNullCount(30L) - .setNullProportion(30.5f) - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - .setSampleValues(new StringArray(ImmutableList.of("val3", "val4"))) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setMax("1") + .setMean("2") + .setStdev("3") + .setMedian("4") + .setMin("5") + .setNullCount(20L) + .setNullProportion(20.5f) + .setUniqueCount(30L) + .setUniqueProportion(30.5f) + .setSampleValues(new StringArray(ImmutableList.of("val1", "val2"))), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + 
.setNullCount(30L) + .setNullProportion(30.5f) + .setUniqueCount(40L) + .setUniqueProportion(40.5f) + .setSampleValues(new StringArray(ImmutableList.of("val3", "val4")))))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); expected.setSizeInBytes(15L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - 20L, - 20.5f, - "5", - "1", - "2", - "4", - "3", - new ArrayList<>(ImmutableList.of("val1", "val2"))), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - 30L, - 30.5f, - "6", - "2", - "3", - "5", - "4", - new ArrayList<>(ImmutableList.of("val3", "val4"))) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", + 30L, + 30.5f, + 20L, + 20.5f, + "5", + "1", + "2", + "4", + "3", + new ArrayList<>(ImmutableList.of("val1", "val2"))), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", + 40L, + 40.5f, + 30L, + 30.5f, + "6", + "2", + "3", + "5", + "4", + new ArrayList<>(ImmutableList.of("val3", "val4")))))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); 
- Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + 
Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + 
Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } @Test @@ -111,77 +149,95 @@ public void testMapperPartialProfile() { input.setTimestampMillis(1L); input.setRowCount(10L); input.setColumnCount(45L); - input.setFieldProfiles(new DatasetFieldProfileArray(ImmutableList.of( - new DatasetFieldProfile().setFieldPath("/field1") - .setUniqueCount(30L) - .setUniqueProportion(30.5f), - new DatasetFieldProfile().setFieldPath("/field2") - .setMax("2") - .setMean("3") - .setStdev("4") - .setMedian("5") - .setMin("6") - .setUniqueCount(40L) - .setUniqueProportion(40.5f) - ))); - final EnvelopedAspect inputAspect = new EnvelopedAspect() - .setAspect(GenericRecordUtils.serializeAspect(input)); + input.setFieldProfiles( + new DatasetFieldProfileArray( + ImmutableList.of( + new DatasetFieldProfile() + .setFieldPath("/field1") + .setUniqueCount(30L) + .setUniqueProportion(30.5f), + new DatasetFieldProfile() + .setFieldPath("/field2") + .setMax("2") + .setMean("3") + .setStdev("4") + .setMedian("5") + .setMin("6") + .setUniqueCount(40L) + .setUniqueProportion(40.5f)))); + final EnvelopedAspect inputAspect = + new EnvelopedAspect().setAspect(GenericRecordUtils.serializeAspect(input)); final DatasetProfile actual = DatasetProfileMapper.map(inputAspect); 
final DatasetProfile expected = new DatasetProfile(); expected.setTimestampMillis(1L); expected.setRowCount(10L); expected.setColumnCount(45L); - expected.setFieldProfiles(new ArrayList<>( - ImmutableList.of( - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field1", - 30L, - 30.5f, - null, - null, - null, - null, - null, - null, - null, - null), - new com.linkedin.datahub.graphql.generated.DatasetFieldProfile("/field2", - 40L, - 40.5f, - null, - null, - "6", - "2", - "3", - "5", - "4", - null) - ) - )); + expected.setFieldProfiles( + new ArrayList<>( + ImmutableList.of( + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field1", 30L, 30.5f, null, null, null, null, null, null, null, null), + new com.linkedin.datahub.graphql.generated.DatasetFieldProfile( + "/field2", 40L, 40.5f, null, null, "6", "2", "3", "5", "4", null)))); Assert.assertEquals(actual.getTimestampMillis(), expected.getTimestampMillis()); Assert.assertEquals(actual.getRowCount(), expected.getRowCount()); Assert.assertEquals(actual.getColumnCount(), expected.getColumnCount()); Assert.assertEquals(actual.getSizeInBytes(), expected.getSizeInBytes()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getFieldPath(), expected.getFieldProfiles().get(0).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getMedian(), expected.getFieldProfiles().get(0).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullCount(), expected.getFieldProfiles().get(0).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getNullProportion(), expected.getFieldProfiles().get(0).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getStdev(), 
expected.getFieldProfiles().get(0).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueCount(), expected.getFieldProfiles().get(0).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getUniqueProportion(), expected.getFieldProfiles().get(0).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(0).getSampleValues(), expected.getFieldProfiles().get(0).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getFieldPath(), + expected.getFieldProfiles().get(0).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMax(), expected.getFieldProfiles().get(0).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMean(), expected.getFieldProfiles().get(0).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getMedian(), + expected.getFieldProfiles().get(0).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullCount(), + expected.getFieldProfiles().get(0).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getNullProportion(), + expected.getFieldProfiles().get(0).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getStdev(), expected.getFieldProfiles().get(0).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueCount(), + expected.getFieldProfiles().get(0).getUniqueCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getUniqueProportion(), + expected.getFieldProfiles().get(0).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(0).getSampleValues(), + expected.getFieldProfiles().get(0).getSampleValues()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getFieldPath(), expected.getFieldProfiles().get(1).getFieldPath()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); - 
Assert.assertEquals(actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getMedian(), expected.getFieldProfiles().get(1).getMedian()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullCount(), expected.getFieldProfiles().get(1).getNullCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getNullProportion(), expected.getFieldProfiles().get(1).getNullProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueCount(), expected.getFieldProfiles().get(1).getUniqueCount()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getUniqueProportion(), expected.getFieldProfiles().get(1).getUniqueProportion()); - Assert.assertEquals(actual.getFieldProfiles().get(1).getSampleValues(), expected.getFieldProfiles().get(1).getSampleValues()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getFieldPath(), + expected.getFieldProfiles().get(1).getFieldPath()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMax(), expected.getFieldProfiles().get(1).getMax()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMean(), expected.getFieldProfiles().get(1).getMean()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getMedian(), + expected.getFieldProfiles().get(1).getMedian()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullCount(), + expected.getFieldProfiles().get(1).getNullCount()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getNullProportion(), + expected.getFieldProfiles().get(1).getNullProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getStdev(), expected.getFieldProfiles().get(1).getStdev()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueCount(), + expected.getFieldProfiles().get(1).getUniqueCount()); + 
Assert.assertEquals( + actual.getFieldProfiles().get(1).getUniqueProportion(), + expected.getFieldProfiles().get(1).getUniqueProportion()); + Assert.assertEquals( + actual.getFieldProfiles().get(1).getSampleValues(), + expected.getFieldProfiles().get(1).getSampleValues()); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java index 48c23f436f875e..32735ad7874a09 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/domain/DomainTypeTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.types.domain; +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -30,33 +33,34 @@ import java.util.HashSet; import java.util.List; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static com.linkedin.datahub.graphql.TestUtils.*; -import static org.testng.Assert.*; - public class DomainTypeTest { private static final String TEST_DOMAIN_1_URN = "urn:li:domain:id-1"; - private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey() - .setId("id-1"); - private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = new DomainProperties() - .setDescription("test description") - .setName("Test Domain"); - private static final Ownership TEST_DOMAIN_1_OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new 
InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); + private static final DomainKey TEST_DOMAIN_1_KEY = new DomainKey().setId("id-1"); + private static final DomainProperties TEST_DOMAIN_1_PROPERTIES = + new DomainProperties().setDescription("test description").setName("Test Domain"); + private static final Ownership TEST_DOMAIN_1_OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory TEST_DOMAIN_1_INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); private static final String TEST_DOMAIN_2_URN = "urn:li:domain:id-2"; @@ -68,39 +72,48 @@ public void testBatchLoad() throws Exception { Urn domainUrn1 = Urn.createFromString(TEST_DOMAIN_1_URN); Urn domainUrn2 = Urn.createFromString(TEST_DOMAIN_2_URN); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - domainUrn1, - new EntityResponse() - .setEntityName(Constants.DOMAIN_ENTITY_NAME) - .setUrn(domainUrn1) - .setAspects(new EnvelopedAspectMap(ImmutableMap.of( - Constants.DOMAIN_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), - Constants.DOMAIN_PROPERTIES_ASPECT_NAME, - new 
EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), - Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), - Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data())) - ))))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(domainUrn1, domainUrn2))), + Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + domainUrn1, + new EntityResponse() + .setEntityName(Constants.DOMAIN_ENTITY_NAME) + .setUrn(domainUrn1) + .setAspects( + new EnvelopedAspectMap( + ImmutableMap.of( + Constants.DOMAIN_KEY_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_KEY.data())), + Constants.DOMAIN_PROPERTIES_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_PROPERTIES.data())), + Constants.OWNERSHIP_ASPECT_NAME, + new EnvelopedAspect() + .setValue(new Aspect(TEST_DOMAIN_1_OWNERSHIP.data())), + Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, + new EnvelopedAspect() + .setValue( + new Aspect(TEST_DOMAIN_1_INSTITUTIONAL_MEMORY.data()))))))); DomainType type = new DomainType(client); QueryContext mockContext = getMockAllowContext(); - List> result = type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DOMAIN_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), - Mockito.eq(DomainType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DOMAIN_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(domainUrn1, domainUrn2)), + 
Mockito.eq(DomainType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -120,17 +133,20 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); DomainType type = new DomainType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_DOMAIN_1_URN, TEST_DOMAIN_2_URN), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java index 918616a2705b7e..f88c8285e20df9 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/notebook/NotebookTypeTest.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.graphql.types.notebook; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -31,16 +33,9 @@ import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import 
com.linkedin.data.template.StringArray; -import com.linkedin.notebook.NotebookCell; -import com.linkedin.notebook.NotebookCellArray; -import com.linkedin.notebook.NotebookCellType; -import com.linkedin.notebook.NotebookContent; -import com.linkedin.notebook.NotebookInfo; -import com.linkedin.notebook.EditableNotebookProperties; -import com.linkedin.notebook.TextCell; import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Notebook; import com.linkedin.datahub.graphql.types.container.ContainerType; import com.linkedin.domain.Domains; import com.linkedin.entity.Aspect; @@ -50,6 +45,13 @@ import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.NotebookKey; +import com.linkedin.notebook.EditableNotebookProperties; +import com.linkedin.notebook.NotebookCell; +import com.linkedin.notebook.NotebookCellArray; +import com.linkedin.notebook.NotebookCellType; +import com.linkedin.notebook.NotebookContent; +import com.linkedin.notebook.NotebookInfo; +import com.linkedin.notebook.TextCell; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -60,58 +62,75 @@ import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.Assert.*; - - public class NotebookTypeTest { private static final String TEST_NOTEBOOK = "urn:li:notebook:(querybook,123)"; - private static final NotebookKey NOTEBOOK_KEY = new NotebookKey() - .setNotebookId("123") - .setNotebookTool("querybook"); - private static final NotebookContent NOTEBOOK_CONTENT = new NotebookContent() - .setCells(new NotebookCellArray(ImmutableList.of(new NotebookCell() - .setType(NotebookCellType.TEXT_CELL) - .setTextCell(new TextCell() - .setCellId("1234") - .setCellTitle("test cell") - .setText("test text") - 
.setChangeAuditStamps(new ChangeAuditStamps()))))); - private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = new EditableNotebookProperties() - .setDescription("test editable description"); - private static final Ownership OWNERSHIP = new Ownership() - .setOwners( - new OwnerArray(ImmutableList.of( - new Owner() - .setType(OwnershipType.DATAOWNER) - .setOwner(Urn.createFromTuple("corpuser", "test"))))); - private static final InstitutionalMemory INSTITUTIONAL_MEMORY = new InstitutionalMemory() - .setElements( - new InstitutionalMemoryMetadataArray(ImmutableList.of( - new InstitutionalMemoryMetadata() - .setUrl(new Url("https://www.test.com")) - .setDescription("test description") - .setCreateStamp(new AuditStamp().setTime(0L).setActor(Urn.createFromTuple("corpuser", "test")))))); - - private static final SubTypes SUB_TYPES = new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); - - private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = new DataPlatformInstance() - .setPlatform(new DataPlatformUrn("test_platform")); - - private static final NotebookInfo NOTEBOOK_INFO = new NotebookInfo() - .setTitle("title") - .setExternalUrl(new Url("https://querybook.com/notebook/123")) - .setChangeAuditStamps(new ChangeAuditStamps()) - .setDescription("test doc"); - - private static final Status STATUS = new Status() - .setRemoved(false); - - private static final Domains DOMAINS = new Domains() - .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); - private static final GlobalTags GLOBAL_TAGS = new GlobalTags() - .setTags(new TagAssociationArray(ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); - private static final GlossaryTerms TEST_GLOSSARY_TERMS = new GlossaryTerms() - .setTerms(new GlossaryTermAssociationArray(ImmutableList.of(new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); + private static final NotebookKey NOTEBOOK_KEY = + new 
NotebookKey().setNotebookId("123").setNotebookTool("querybook"); + private static final NotebookContent NOTEBOOK_CONTENT = + new NotebookContent() + .setCells( + new NotebookCellArray( + ImmutableList.of( + new NotebookCell() + .setType(NotebookCellType.TEXT_CELL) + .setTextCell( + new TextCell() + .setCellId("1234") + .setCellTitle("test cell") + .setText("test text") + .setChangeAuditStamps(new ChangeAuditStamps()))))); + private static final EditableNotebookProperties TEST_EDITABLE_DESCRIPTION = + new EditableNotebookProperties().setDescription("test editable description"); + private static final Ownership OWNERSHIP = + new Ownership() + .setOwners( + new OwnerArray( + ImmutableList.of( + new Owner() + .setType(OwnershipType.DATAOWNER) + .setOwner(Urn.createFromTuple("corpuser", "test"))))); + private static final InstitutionalMemory INSTITUTIONAL_MEMORY = + new InstitutionalMemory() + .setElements( + new InstitutionalMemoryMetadataArray( + ImmutableList.of( + new InstitutionalMemoryMetadata() + .setUrl(new Url("https://www.test.com")) + .setDescription("test description") + .setCreateStamp( + new AuditStamp() + .setTime(0L) + .setActor(Urn.createFromTuple("corpuser", "test")))))); + + private static final SubTypes SUB_TYPES = + new SubTypes().setTypeNames(new StringArray(ImmutableList.of("DataDoc"))); + + private static final DataPlatformInstance DATA_PLATFORM_INSTANCE = + new DataPlatformInstance().setPlatform(new DataPlatformUrn("test_platform")); + + private static final NotebookInfo NOTEBOOK_INFO = + new NotebookInfo() + .setTitle("title") + .setExternalUrl(new Url("https://querybook.com/notebook/123")) + .setChangeAuditStamps(new ChangeAuditStamps()) + .setDescription("test doc"); + + private static final Status STATUS = new Status().setRemoved(false); + + private static final Domains DOMAINS = + new Domains() + .setDomains(new UrnArray(ImmutableList.of(UrnUtils.getUrn("urn:li:domain:123")))); + private static final GlobalTags GLOBAL_TAGS = + new 
GlobalTags() + .setTags( + new TagAssociationArray( + ImmutableList.of(new TagAssociation().setTag(new TagUrn("test"))))); + private static final GlossaryTerms TEST_GLOSSARY_TERMS = + new GlossaryTerms() + .setTerms( + new GlossaryTermAssociationArray( + ImmutableList.of( + new GlossaryTermAssociation().setUrn(new GlossaryTermUrn("term"))))); @Test public void testBatchLoad() throws Exception { @@ -121,79 +140,69 @@ public void testBatchLoad() throws Exception { Map notebookAspects = new HashMap<>(); notebookAspects.put( Constants.NOTEBOOK_KEY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_KEY.data()))); notebookAspects.put( Constants.NOTEBOOK_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_INFO.data()))); notebookAspects.put( Constants.NOTEBOOK_CONTENT_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data())) - ); + new EnvelopedAspect().setValue(new Aspect(NOTEBOOK_CONTENT.data()))); notebookAspects.put( Constants.EDITABLE_NOTEBOOK_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_EDITABLE_DESCRIPTION.data()))); notebookAspects.put( Constants.OWNERSHIP_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data())) - ); + new EnvelopedAspect().setValue(new Aspect(OWNERSHIP.data()))); notebookAspects.put( Constants.INSTITUTIONAL_MEMORY_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data())) - ); + new EnvelopedAspect().setValue(new Aspect(INSTITUTIONAL_MEMORY.data()))); notebookAspects.put( - Constants.STATUS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(STATUS.data())) - ); + Constants.STATUS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(STATUS.data()))); notebookAspects.put( 
Constants.GLOBAL_TAGS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data())) - ); + new EnvelopedAspect().setValue(new Aspect(GLOBAL_TAGS.data()))); notebookAspects.put( - Constants.DOMAINS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(DOMAINS.data())) - ); + Constants.DOMAINS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DOMAINS.data()))); notebookAspects.put( Constants.SUB_TYPES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data())) - ); + new EnvelopedAspect().setValue(new Aspect(SUB_TYPES.data()))); notebookAspects.put( Constants.GLOSSARY_TERMS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data())) - ); - notebookAspects.put(Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(TEST_GLOSSARY_TERMS.data()))); + notebookAspects.put( + Constants.DATA_PLATFORM_INSTANCE_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(DATA_PLATFORM_INSTANCE.data()))); Urn notebookUrn = new NotebookUrn("querybook", "123"); Urn dummyNotebookUrn = new NotebookUrn("querybook", "dummy"); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - notebookUrn, - new EntityResponse() - .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) - .setUrn(notebookUrn) - .setAspects(new EnvelopedAspectMap(notebookAspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(notebookUrn, dummyNotebookUrn))), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + notebookUrn, + new EntityResponse() + .setEntityName(Constants.NOTEBOOK_ENTITY_NAME) + .setUrn(notebookUrn) + .setAspects(new 
EnvelopedAspectMap(notebookAspects)))); NotebookType type = new NotebookType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> - result = type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_NOTEBOOK, dummyNotebookUrn.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), - Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.NOTEBOOK_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(notebookUrn, dummyNotebookUrn)), + Mockito.eq(NotebookType.ASPECTS_TO_RESOLVE), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -201,13 +210,17 @@ public void testBatchLoad() throws Exception { Notebook notebook = result.get(0).getData(); assertEquals(notebook.getContent().getCells().size(), NOTEBOOK_CONTENT.getCells().size()); - assertEquals(notebook.getContent().getCells().get(0).getType().toString(), + assertEquals( + notebook.getContent().getCells().get(0).getType().toString(), NOTEBOOK_CONTENT.getCells().get(0).getType().toString()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellId(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellId(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellId()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), + assertEquals( + notebook.getContent().getCells().get(0).getTextCell().getCellTitle(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getCellTitle()); - assertEquals(notebook.getContent().getCells().get(0).getTextCell().getText(), + assertEquals( 
+ notebook.getContent().getCells().get(0).getTextCell().getText(), NOTEBOOK_CONTENT.getCells().get(0).getTextCell().getText()); assertEquals(notebook.getInfo().getDescription(), NOTEBOOK_INFO.getDescription()); assertEquals(notebook.getInfo().getExternalUrl(), NOTEBOOK_INFO.getExternalUrl().toString()); @@ -217,11 +230,17 @@ public void testBatchLoad() throws Exception { assertEquals(notebook.getType(), EntityType.NOTEBOOK); assertEquals(notebook.getOwnership().getOwners().size(), 1); assertEquals(notebook.getInstitutionalMemory().getElements().size(), 1); - assertEquals(notebook.getEditableProperties().getDescription(), TEST_EDITABLE_DESCRIPTION.getDescription()); - assertEquals(notebook.getTags().getTags().get(0).getTag().getUrn(), + assertEquals( + notebook.getEditableProperties().getDescription(), + TEST_EDITABLE_DESCRIPTION.getDescription()); + assertEquals( + notebook.getTags().getTags().get(0).getTag().getUrn(), GLOBAL_TAGS.getTags().get(0).getTag().toString()); - assertEquals(notebook.getSubTypes().getTypeNames(), SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); - assertEquals(notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), + assertEquals( + notebook.getSubTypes().getTypeNames(), + SUB_TYPES.getTypeNames().stream().collect(Collectors.toList())); + assertEquals( + notebook.getGlossaryTerms().getTerms().get(0).getTerm().getUrn(), TEST_GLOSSARY_TERMS.getTerms().get(0).getUrn().toString()); assertEquals(notebook.getPlatform().getUrn(), DATA_PLATFORM_INSTANCE.getPlatform().toString()); @@ -232,17 +251,19 @@ public void testBatchLoad() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + 
.batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); ContainerType type = new ContainerType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), - context)); + assertThrows( + RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_NOTEBOOK), context)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java index a3c089b91de875..c8f694320d88ae 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/query/QueryTypeTest.java @@ -1,28 +1,30 @@ package com.linkedin.datahub.graphql.types.query; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; -import com.linkedin.common.urn.UrnUtils; -import com.linkedin.datahub.graphql.generated.QueryEntity; -import com.linkedin.query.QueryLanguage; -import com.linkedin.query.QueryProperties; -import com.linkedin.query.QuerySource; -import com.linkedin.query.QueryStatement; -import com.linkedin.query.QuerySubject; -import com.linkedin.query.QuerySubjectArray; -import com.linkedin.query.QuerySubjects; import com.linkedin.common.urn.Urn; +import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.QueryEntity; import 
com.linkedin.entity.Aspect; import com.linkedin.entity.EntityResponse; import com.linkedin.entity.EnvelopedAspect; import com.linkedin.entity.EnvelopedAspectMap; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; +import com.linkedin.query.QueryLanguage; +import com.linkedin.query.QueryProperties; +import com.linkedin.query.QuerySource; +import com.linkedin.query.QueryStatement; +import com.linkedin.query.QuerySubject; +import com.linkedin.query.QuerySubjectArray; +import com.linkedin.query.QuerySubjects; import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; @@ -30,53 +32,50 @@ import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class QueryTypeTest { private static final Urn TEST_QUERY_URN = UrnUtils.getUrn("urn:li:query:test"); private static final Urn TEST_QUERY_2_URN = UrnUtils.getUrn("urn:li:query:test-2"); - private static final Urn TEST_DATASET_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); - private static final Urn TEST_DATASET_2_URN = UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); + private static final Urn TEST_DATASET_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test,PROD)"); + private static final Urn TEST_DATASET_2_URN = + UrnUtils.getUrn("urn:li:dataset:(urn:li:dataPlatform:hive,test-2,PROD)"); private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); - private static final QueryProperties TEST_QUERY_PROPERTIES_1 = new QueryProperties() - .setName("Query Name") - .setDescription("Query Description") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new 
AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_URN) - ) - )); - private static final QueryProperties TEST_QUERY_PROPERTIES_2 = new QueryProperties() - .setName("Query Name 2") - .setDescription("Query Description 2") - .setSource(QuerySource.MANUAL) - .setStatement(new QueryStatement() - .setLanguage(QueryLanguage.SQL) - .setValue("SELECT * FROM MyTestTable2") - ) - .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) - .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); - private static final QuerySubjects TEST_QUERY_SUBJECTS_2 = new QuerySubjects() - .setSubjects(new QuerySubjectArray( - ImmutableList.of( - new QuerySubject() - .setEntity(TEST_DATASET_2_URN) - ) - )); + private static final QueryProperties TEST_QUERY_PROPERTIES_1 = + new QueryProperties() + .setName("Query Name") + .setDescription("Query Description") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private static final QuerySubjects TEST_QUERY_SUBJECTS_1 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_URN)))); + private static final QueryProperties TEST_QUERY_PROPERTIES_2 = + new QueryProperties() + .setName("Query Name 2") + .setDescription("Query Description 2") + .setSource(QuerySource.MANUAL) + .setStatement( + new QueryStatement() + .setLanguage(QueryLanguage.SQL) + .setValue("SELECT * FROM MyTestTable2")) + .setCreated(new AuditStamp().setActor(TEST_USER_URN).setTime(0L)) + .setLastModified(new AuditStamp().setActor(TEST_USER_URN).setTime(1L)); + private 
static final QuerySubjects TEST_QUERY_SUBJECTS_2 = + new QuerySubjects() + .setSubjects( + new QuerySubjectArray( + ImmutableList.of(new QuerySubject().setEntity(TEST_DATASET_2_URN)))); @Test public void testBatchLoad() throws Exception { @@ -87,38 +86,54 @@ public void testBatchLoad() throws Exception { Urn queryUrn2 = TEST_QUERY_2_URN; Map query1Aspects = new HashMap<>(); - query1Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); - query1Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query1Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); Map query2Aspects = new HashMap<>(); - query2Aspects.put(Constants.QUERY_PROPERTIES_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_PROPERTIES_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_2.data()))); - query2Aspects.put(Constants.QUERY_SUBJECTS_ASPECT_NAME, + query2Aspects.put( + Constants.QUERY_SUBJECTS_ASPECT_NAME, new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_2.data()))); - Mockito.when(client.batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of(queryUrn1, new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)), queryUrn2, - new EntityResponse().setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn2) - .setAspects(new EnvelopedAspectMap(query2Aspects)))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + 
Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new EnvelopedAspectMap(query1Aspects)), + queryUrn2, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn2) + .setAspects(new EnvelopedAspectMap(query2Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); List> result = - type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response Mockito.verify(client, Mockito.times(1)) - .batchGetV2(Mockito.eq(Constants.QUERY_ENTITY_NAME), Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), Mockito.eq(QueryType.ASPECTS_TO_FETCH), + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -141,40 +156,39 @@ public void testBatchLoadNullEntity() throws Exception { Map query1Aspects = new HashMap<>(); query1Aspects.put( Constants.QUERY_PROPERTIES_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data())) - ); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_PROPERTIES_1.data()))); query1Aspects.put( Constants.QUERY_SUBJECTS_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), - 
Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - queryUrn1, - new EntityResponse() - .setEntityName(Constants.QUERY_ENTITY_NAME) - .setUrn(queryUrn1) - .setAspects(new EnvelopedAspectMap(query1Aspects)))); + new EnvelopedAspect().setValue(new Aspect(TEST_QUERY_SUBJECTS_1.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(queryUrn1, queryUrn2))), + Mockito.eq(com.linkedin.datahub.graphql.types.query.QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + queryUrn1, + new EntityResponse() + .setEntityName(Constants.QUERY_ENTITY_NAME) + .setUrn(queryUrn1) + .setAspects(new EnvelopedAspectMap(query1Aspects)))); QueryType type = new QueryType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of( - TEST_QUERY_URN.toString(), - TEST_QUERY_2_URN.toString()), - mockContext); + List> result = + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.QUERY_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), - Mockito.eq(QueryType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.QUERY_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(queryUrn1, queryUrn2)), + Mockito.eq(QueryType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -188,18 +202,23 @@ public void testBatchLoadNullEntity() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = 
Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); QueryType type = new QueryType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), - context)); + assertThrows( + RuntimeException.class, + () -> + type.batchLoad( + ImmutableList.of(TEST_QUERY_URN.toString(), TEST_QUERY_2_URN.toString()), context)); } private void verifyQuery1(QueryEntity query) { @@ -207,14 +226,30 @@ private void verifyQuery1(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_1.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_1.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_1.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_1.getStatement().getValue()); - assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_1.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); - 
assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_1.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_1.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_1.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_1.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_1.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_1.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_1.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_1.getSubjects().get(0).getEntity().toString()); } private void verifyQuery2(QueryEntity query) { @@ -222,13 +257,29 @@ private void verifyQuery2(QueryEntity query) { assertEquals(query.getType(), EntityType.QUERY); assertEquals(query.getProperties().getName(), TEST_QUERY_PROPERTIES_2.getName()); assertEquals(query.getProperties().getDescription(), TEST_QUERY_PROPERTIES_2.getDescription()); - assertEquals(query.getProperties().getSource().toString(), TEST_QUERY_PROPERTIES_2.getSource().toString()); - assertEquals(query.getProperties().getStatement().getLanguage().toString(), TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); - assertEquals(query.getProperties().getStatement().getValue(), TEST_QUERY_PROPERTIES_2.getStatement().getValue()); - 
assertEquals(query.getProperties().getCreated().getActor(), TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); - assertEquals(query.getProperties().getCreated().getTime(), TEST_QUERY_PROPERTIES_2.getCreated().getTime()); - assertEquals(query.getProperties().getLastModified().getActor(), TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); - assertEquals(query.getProperties().getLastModified().getTime(), TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); - assertEquals(query.getSubjects().get(0).getDataset().getUrn(), TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); + assertEquals( + query.getProperties().getSource().toString(), + TEST_QUERY_PROPERTIES_2.getSource().toString()); + assertEquals( + query.getProperties().getStatement().getLanguage().toString(), + TEST_QUERY_PROPERTIES_2.getStatement().getLanguage().toString()); + assertEquals( + query.getProperties().getStatement().getValue(), + TEST_QUERY_PROPERTIES_2.getStatement().getValue()); + assertEquals( + query.getProperties().getCreated().getActor(), + TEST_QUERY_PROPERTIES_2.getCreated().getActor().toString()); + assertEquals( + query.getProperties().getCreated().getTime(), + TEST_QUERY_PROPERTIES_2.getCreated().getTime()); + assertEquals( + query.getProperties().getLastModified().getActor(), + TEST_QUERY_PROPERTIES_2.getLastModified().getActor().toString()); + assertEquals( + query.getProperties().getLastModified().getTime(), + TEST_QUERY_PROPERTIES_2.getLastModified().getTime()); + assertEquals( + query.getSubjects().get(0).getDataset().getUrn(), + TEST_QUERY_SUBJECTS_2.getSubjects().get(0).getEntity().toString()); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java index 7f3c8f99f6593a..f02fd38e2ca7ca 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/types/view/DataHubViewTypeTest.java @@ -1,114 +1,149 @@ package com.linkedin.datahub.graphql.types.view; +import static org.testng.Assert.*; + import com.datahub.authentication.Authentication; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; +import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.data.template.StringArray; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.DataHubView; +import com.linkedin.datahub.graphql.generated.EntityType; import com.linkedin.datahub.graphql.generated.FilterOperator; import com.linkedin.datahub.graphql.generated.LogicalOperator; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.Constants; import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; +import com.linkedin.r2.RemoteInvocationException; import com.linkedin.view.DataHubViewDefinition; import com.linkedin.view.DataHubViewInfo; import com.linkedin.view.DataHubViewType; -import com.linkedin.common.urn.Urn; -import com.linkedin.datahub.graphql.QueryContext; -import com.linkedin.datahub.graphql.generated.DataHubView; -import com.linkedin.datahub.graphql.generated.EntityType; -import 
com.linkedin.entity.Aspect; -import com.linkedin.entity.EntityResponse; -import com.linkedin.entity.EnvelopedAspect; -import com.linkedin.entity.EnvelopedAspectMap; -import com.linkedin.entity.client.EntityClient; -import com.linkedin.metadata.Constants; -import com.linkedin.r2.RemoteInvocationException; import graphql.execution.DataFetcherResult; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import org.mockito.Mockito; - import org.testng.annotations.Test; -import static org.testng.Assert.*; - public class DataHubViewTypeTest { private static final String TEST_VIEW_URN = "urn:li:dataHubView:test"; private static final Urn TEST_USER_URN = UrnUtils.getUrn("urn:li:corpuser:test"); + /** * A Valid View is one which is minted by the createView or updateView GraphQL resolvers. * - * View Definitions currently support a limited Filter structure, which includes a single Logical filter set. - * Either a set of OR criteria with 1 value in each nested "and", or a single OR criteria with a set of nested ANDs. + *

View Definitions currently support a limited Filter structure, which includes a single + * Logical filter set. Either a set of OR criteria with 1 value in each nested "and", or a single + * OR criteria with a set of nested ANDs. * - * This enables us to easily support merging more complex View predicates in the future without a data migration, - * should the need arise. + *

This enables us to easily support merging more complex View predicates in the future without + * a data migration, should the need arise. */ - private static final DataHubViewInfo TEST_VALID_VIEW_INFO = new DataHubViewInfo() - .setType(DataHubViewType.PERSONAL) - .setName("test") - .setDescription("test description") - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setDefinition(new DataHubViewDefinition() - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test") - .setCondition(Condition.EQUAL) - ))) - ))) - ) - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); + private static final DataHubViewInfo TEST_VALID_VIEW_INFO = + new DataHubViewInfo() + .setType(DataHubViewType.PERSONAL) + .setName("test") + .setDescription("test description") + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test") + .setCondition(Condition.EQUAL)))))))) + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); /** - * An Invalid View is on which has been ingested manually, which should not occur under normal operation of DataHub. 
+ * An Invalid View is on which has been ingested manually, which should not occur under normal + * operation of DataHub. * - * This would be a complex view with multiple OR and nested AND predicates. + *

This would be a complex view with multiple OR and nested AND predicates. */ - private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = new DataHubViewInfo() - .setType(DataHubViewType.PERSONAL) - .setName("test") - .setDescription("test description") - .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) - .setDefinition(new DataHubViewDefinition() - .setFilter(new Filter() - .setOr(new ConjunctiveCriterionArray(ImmutableList.of( - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))), - new ConjunctiveCriterion().setAnd(new CriterionArray(ImmutableList.of( - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL), - new Criterion() - .setValues(new StringArray(ImmutableList.of("value1", "value2"))) - .setField("test2") - .setCondition(Condition.EQUAL) - ))) - ))) - ) - .setEntityTypes(new StringArray(ImmutableList.of(Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); + private static final DataHubViewInfo TEST_INVALID_VIEW_INFO = + new DataHubViewInfo() + .setType(DataHubViewType.PERSONAL) + .setName("test") + .setDescription("test description") + .setCreated(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setLastModified(new AuditStamp().setTime(0L).setActor(TEST_USER_URN)) + .setDefinition( + new DataHubViewDefinition() + .setFilter( + new Filter() + .setOr( + new ConjunctiveCriterionArray( + ImmutableList.of( + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + 
"value1", "value2"))) + .setField("test") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))), + new ConjunctiveCriterion() + .setAnd( + new CriterionArray( + ImmutableList.of( + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL), + new Criterion() + .setValues( + new StringArray( + ImmutableList.of( + "value1", "value2"))) + .setField("test2") + .setCondition(Condition.EQUAL)))))))) + .setEntityTypes( + new StringArray( + ImmutableList.of( + Constants.DATASET_ENTITY_NAME, Constants.DASHBOARD_ENTITY_NAME)))); private static final String TEST_VIEW_URN_2 = "urn:li:dataHubView:test2"; @@ -123,33 +158,37 @@ public void testBatchLoadValidView() throws Exception { Map view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - viewUrn1, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(viewUrn1) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_VALID_VIEW_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(viewUrn1, viewUrn2))), + Mockito.eq( + com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + 
Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + viewUrn1, + new EntityResponse() + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setUrn(viewUrn1) + .setAspects(new EnvelopedAspectMap(view1Aspects)))); + + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(viewUrn1, viewUrn2)), + Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 2); @@ -164,9 +203,12 @@ public void testBatchLoadValidView() throws Exception { assertEquals(view.getDefinition().getEntityTypes().get(1), EntityType.DASHBOARD); assertEquals(view.getDefinition().getFilter().getOperator(), LogicalOperator.AND); assertEquals(view.getDefinition().getFilter().getFilters().size(), 1); - assertEquals(view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL); + assertEquals( + view.getDefinition().getFilter().getFilters().get(0).getCondition(), FilterOperator.EQUAL); assertEquals(view.getDefinition().getFilter().getFilters().get(0).getField(), "test"); - 
assertEquals(view.getDefinition().getFilter().getFilters().get(0).getValues(), ImmutableList.of("value1", "value2")); + assertEquals( + view.getDefinition().getFilter().getFilters().get(0).getValues(), + ImmutableList.of("value1", "value2")); // Assert second element is null. assertNull(result.get(1)); @@ -174,40 +216,45 @@ public void testBatchLoadValidView() throws Exception { @Test public void testBatchLoadInvalidView() throws Exception { - // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log a warning). + // If an Invalid View Definition is found in MySQL, we will return an Empty no-op View. (and log + // a warning). EntityClient client = Mockito.mock(EntityClient.class); Urn invalidViewUrn = Urn.createFromString(TEST_VIEW_URN); Map view1Aspects = new HashMap<>(); view1Aspects.put( Constants.DATAHUB_VIEW_INFO_ASPECT_NAME, - new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data())) - ); - Mockito.when(client.batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class))) - .thenReturn(ImmutableMap.of( - invalidViewUrn, - new EntityResponse() - .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) - .setUrn(invalidViewUrn) - .setAspects(new EnvelopedAspectMap(view1Aspects)))); - - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); + new EnvelopedAspect().setValue(new Aspect(TEST_INVALID_VIEW_INFO.data()))); + Mockito.when( + client.batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(new HashSet<>(ImmutableSet.of(invalidViewUrn))), + Mockito.eq( + com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class))) + .thenReturn( + ImmutableMap.of( + invalidViewUrn, + new 
EntityResponse() + .setEntityName(Constants.DATAHUB_VIEW_ENTITY_NAME) + .setUrn(invalidViewUrn) + .setAspects(new EnvelopedAspectMap(view1Aspects)))); + + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + new com.linkedin.datahub.graphql.types.view.DataHubViewType(client); QueryContext mockContext = Mockito.mock(QueryContext.class); Mockito.when(mockContext.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - List> result = type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); + List> result = + type.batchLoad(ImmutableList.of(TEST_VIEW_URN), mockContext); // Verify response - Mockito.verify(client, Mockito.times(1)).batchGetV2( - Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), - Mockito.eq(ImmutableSet.of(invalidViewUrn)), - Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), - Mockito.any(Authentication.class) - ); + Mockito.verify(client, Mockito.times(1)) + .batchGetV2( + Mockito.eq(Constants.DATAHUB_VIEW_ENTITY_NAME), + Mockito.eq(ImmutableSet.of(invalidViewUrn)), + Mockito.eq(com.linkedin.datahub.graphql.types.view.DataHubViewType.ASPECTS_TO_FETCH), + Mockito.any(Authentication.class)); assertEquals(result.size(), 1); @@ -227,17 +274,21 @@ public void testBatchLoadInvalidView() throws Exception { @Test public void testBatchLoadClientException() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); - Mockito.doThrow(RemoteInvocationException.class).when(mockClient).batchGetV2( - Mockito.anyString(), - Mockito.anySet(), - Mockito.anySet(), - Mockito.any(Authentication.class)); - com.linkedin.datahub.graphql.types.view.DataHubViewType type = new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient); + Mockito.doThrow(RemoteInvocationException.class) + .when(mockClient) + .batchGetV2( + Mockito.anyString(), + Mockito.anySet(), + Mockito.anySet(), + Mockito.any(Authentication.class)); + com.linkedin.datahub.graphql.types.view.DataHubViewType type = + 
new com.linkedin.datahub.graphql.types.view.DataHubViewType(mockClient); // Execute Batch load QueryContext context = Mockito.mock(QueryContext.class); Mockito.when(context.getAuthentication()).thenReturn(Mockito.mock(Authentication.class)); - assertThrows(RuntimeException.class, () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), - context)); + assertThrows( + RuntimeException.class, + () -> type.batchLoad(ImmutableList.of(TEST_VIEW_URN, TEST_VIEW_URN_2), context)); } -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java index 0a58ff88586c6d..6ecbc8d015b29a 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/DateUtilTest.java @@ -1,57 +1,50 @@ package com.linkedin.datahub.graphql.utils; +import static org.testng.AssertJUnit.assertEquals; + import com.linkedin.datahub.graphql.util.DateUtil; import org.joda.time.DateTime; import org.mockito.Mockito; import org.testng.annotations.Test; -import static org.testng.AssertJUnit.assertEquals; - public class DateUtilTest { - private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { - DateTime result = new DateTime() - .withDate(2023, 1, dayOfMonth); - if (zeroTime) { - return new DateUtil().setTimeToZero(result); - } - return result - .withHourOfDay(1) - .withMinuteOfHour(2) - .withSecondOfMinute(3) - .withMillisOfSecond(4); + private DateTime setTimeParts(int dayOfMonth, boolean zeroTime) { + DateTime result = new DateTime().withDate(2023, 1, dayOfMonth); + if (zeroTime) { + return new DateUtil().setTimeToZero(result); } + return result.withHourOfDay(1).withMinuteOfHour(2).withSecondOfMinute(3).withMillisOfSecond(4); + } - private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { - 
assertEquals( - setTimeParts(dayOfMonth, true).getMillis(), - dateUtil.getStartOfNextWeek().getMillis() - ); - } + private void assertEqualStartOfNextWeek(DateUtil dateUtil, int dayOfMonth) { + assertEquals( + setTimeParts(dayOfMonth, true).getMillis(), dateUtil.getStartOfNextWeek().getMillis()); + } - @Test - public void testStartOfNextWeek() { - DateUtil dateUtil = Mockito.spy(DateUtil.class); + @Test + public void testStartOfNextWeek() { + DateUtil dateUtil = Mockito.spy(DateUtil.class); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(2, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(3, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(4, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(5, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(6, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); - assertEqualStartOfNextWeek(dateUtil, 9); + Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(7, false)); + assertEqualStartOfNextWeek(dateUtil, 9); - Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); - assertEqualStartOfNextWeek(dateUtil, 9); - } + 
Mockito.when(dateUtil.getNow()).thenReturn(setTimeParts(8, false)); + assertEqualStartOfNextWeek(dateUtil, 9); + } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java index 48ce2ddb6dde4d..0419fe0b5254d3 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/MutationsUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.*; +import static org.testng.AssertJUnit.*; + import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.resolvers.mutate.MutationUtils; import com.linkedin.identity.CorpUserInfo; @@ -7,19 +10,24 @@ import com.linkedin.mxe.MetadataChangeProposal; import org.testng.annotations.Test; -import static com.linkedin.metadata.Constants.*; -import static org.testng.AssertJUnit.*; - - public class MutationsUtilsTest { @Test public void testBuildMetadataChangeProposal() { - MetadataChangeProposal metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithUrn( - UrnUtils.getUrn("urn:li:corpuser:datahub"), CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); - metadataChangeProposal = MutationUtils.buildMetadataChangeProposalWithKey(new CorpUserKey().setUsername("datahub"), - CORP_USER_ENTITY_NAME, CORP_USER_INFO_ASPECT_NAME, new CorpUserInfo().setActive(true)); - assertEquals(UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + MetadataChangeProposal metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithUrn( + UrnUtils.getUrn("urn:li:corpuser:datahub"), + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + 
assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); + metadataChangeProposal = + MutationUtils.buildMetadataChangeProposalWithKey( + new CorpUserKey().setUsername("datahub"), + CORP_USER_ENTITY_NAME, + CORP_USER_INFO_ASPECT_NAME, + new CorpUserInfo().setActive(true)); + assertEquals( + UI_SOURCE, metadataChangeProposal.getSystemMetadata().getProperties().get(APP_SOURCE)); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java index adbc6808b5ab9e..005b47df56982e 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/utils/SystemMetadataUtilsTest.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.graphql.utils; +import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; +import static org.testng.Assert.*; + import com.linkedin.datahub.graphql.types.common.mappers.util.RunInfo; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.entity.EnvelopedAspect; @@ -8,10 +11,6 @@ import java.util.List; import org.testng.annotations.Test; -import static org.testng.Assert.*; - -import static com.linkedin.metadata.Constants.DEFAULT_RUN_ID; - public class SystemMetadataUtilsTest { private final Long recentLastObserved = 1660056070640L; @@ -21,15 +20,21 @@ public class SystemMetadataUtilsTest { @Test public void testGetLastIngestedTime() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new 
SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertEquals(lastObserved, mediumLastObserved); @@ -38,15 +43,21 @@ public void testGetLastIngestedTime() { @Test public void testGetLastIngestedRunId() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new 
SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); String lastRunId = SystemMetadataUtils.getLastIngestedRunId(aspectMap); assertEquals(lastRunId, "real-id-1"); @@ -55,15 +66,21 @@ public void testGetLastIngestedRunId() { @Test public void testGetLastIngestedRuns() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("real-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved) - )); - aspectMap.put("real-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "real-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-1").setLastObserved(mediumLastObserved))); + aspectMap.put( + "real-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId("real-id-2").setLastObserved(distantLastObserved))); List runs = SystemMetadataUtils.getLastIngestionRuns(aspectMap); @@ -75,15 +92,23 @@ public void testGetLastIngestedRuns() { @Test public void testGetLastIngestedTimeAllDefaultRunIds() { EnvelopedAspectMap aspectMap = new EnvelopedAspectMap(); - aspectMap.put("default-run-id", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved) - )); - aspectMap.put("default-run-id2", new EnvelopedAspect().setSystemMetadata( - new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved) - )); - aspectMap.put("default-run-id3", new EnvelopedAspect().setSystemMetadata( - new 
SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(distantLastObserved) - )); + aspectMap.put( + "default-run-id", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(recentLastObserved))); + aspectMap.put( + "default-run-id2", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(mediumLastObserved))); + aspectMap.put( + "default-run-id3", + new EnvelopedAspect() + .setSystemMetadata( + new SystemMetadata() + .setRunId(DEFAULT_RUN_ID) + .setLastObserved(distantLastObserved))); Long lastObserved = SystemMetadataUtils.getLastIngestedTime(aspectMap); assertNull(lastObserved, null); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java index c42e1bb7f92e05..d3aea2a3dac12a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/Upgrade.java @@ -1,30 +1,19 @@ package com.linkedin.datahub.upgrade; import com.google.common.collect.ImmutableList; - import java.util.List; - -/** - * Specification of an upgrade to be performed to the DataHub platform. - */ +/** Specification of an upgrade to be performed to the DataHub platform. */ public interface Upgrade { - /** - * String identifier for the upgrade. - */ + /** String identifier for the upgrade. */ String id(); - /** - * Returns a set of steps to perform during the upgrade. - */ + /** Returns a set of steps to perform during the upgrade. */ List steps(); - /** - * Returns a set of steps to perform on upgrade success, failure, or abort. - */ + /** Returns a set of steps to perform on upgrade success, failure, or abort. 
*/ default List cleanupSteps() { return ImmutableList.of(); } - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java index bf356c60a21a4a..6da656020edf84 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCleanupStep.java @@ -2,21 +2,15 @@ import java.util.function.BiConsumer; - /** * Step executed on finish of an {@link Upgrade}. * - * Note that this step is not retried, even in case of failures. + *

Note that this step is not retried, even in case of failures. */ public interface UpgradeCleanupStep { - /** - * Returns an identifier for the upgrade step. - */ + /** Returns an identifier for the upgrade step. */ String id(); - /** - * Returns a function representing the cleanup step's logic. - */ + /** Returns a function representing the cleanup step's logic. */ BiConsumer executable(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java index e6be6905acceea..eee27096e22388 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCli.java @@ -1,14 +1,14 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.SystemUpdate; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeManager; -import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import com.linkedin.datahub.upgrade.nocode.NoCodeUpgrade; import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.datahub.upgrade.removeunknownaspects.RemoveUnknownAspects; import com.linkedin.datahub.upgrade.restorebackup.RestoreBackup; import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.SystemUpdate; +import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import com.linkedin.datahub.upgrade.system.elasticsearch.CleanIndices; import java.util.List; import javax.inject.Inject; import javax.inject.Named; @@ -17,7 +17,6 @@ import org.springframework.stereotype.Component; import picocli.CommandLine; - @Slf4j @Component public class UpgradeCli implements CommandLineRunner { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java index 53a5c0758f3189..909ceeb8f3bab4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeCliApplication.java @@ -8,18 +8,23 @@ import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.FilterType; - @SuppressWarnings("checkstyle:HideUtilityClassConstructor") @SpringBootApplication(exclude = {ElasticsearchRestClientAutoConfiguration.class}) -@ComponentScan(basePackages = { - "com.linkedin.gms.factory", - "com.linkedin.datahub.upgrade.config", - "com.linkedin.metadata.dao.producer" -}, excludeFilters = { - @ComponentScan.Filter(type = FilterType.ASSIGNABLE_TYPE, classes = ScheduledAnalyticsFactory.class) -}) +@ComponentScan( + basePackages = { + "com.linkedin.gms.factory", + "com.linkedin.datahub.upgrade.config", + "com.linkedin.metadata.dao.producer" + }, + excludeFilters = { + @ComponentScan.Filter( + type = FilterType.ASSIGNABLE_TYPE, + classes = ScheduledAnalyticsFactory.class) + }) public class UpgradeCliApplication { public static void main(String[] args) { - new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class).web(WebApplicationType.NONE).run(args); + new SpringApplicationBuilder(UpgradeCliApplication.class, UpgradeCli.class) + .web(WebApplicationType.NONE) + .run(args); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java index 76cfc6321adfdb..25a3d44b6e9da6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeContext.java @@ -4,35 +4,21 @@ import java.util.Map; import java.util.Optional; - -/** - * Context about a currently running upgrade. 
- */ +/** Context about a currently running upgrade. */ public interface UpgradeContext { - /** - * Returns the currently running upgrade. - */ + /** Returns the currently running upgrade. */ Upgrade upgrade(); - /** - * Returns the results from steps that have been completed. - */ + /** Returns the results from steps that have been completed. */ List stepResults(); - /** - * Returns a report object where human-readable messages can be logged. - */ + /** Returns a report object where human-readable messages can be logged. */ UpgradeReport report(); - /** - * Returns a list of raw arguments that have been provided as input to the upgrade. - */ + /** Returns a list of raw arguments that have been provided as input to the upgrade. */ List args(); - /** - * Returns a map of argument to <>optional value, as delimited by an '=' character. - */ + /** Returns a map of argument to <>optional value, as delimited by an '=' character. */ Map> parsedArgs(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java index 927ccc05783084..c01aca12254a3f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeManager.java @@ -2,20 +2,12 @@ import java.util.List; - -/** - * Responsible for managing the execution of an {@link Upgrade}. - */ +/** Responsible for managing the execution of an {@link Upgrade}. */ public interface UpgradeManager { - /** - * Register an {@link Upgrade} with the manaager. - */ + /** Register an {@link Upgrade} with the manaager. */ void register(Upgrade upgrade); - /** - * Kick off an {@link Upgrade} by identifier. - */ + /** Kick off an {@link Upgrade} by identifier. 
*/ UpgradeResult execute(String upgradeId, List args); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java index 2ed3f105a4edaa..1c677f6fe8578f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeReport.java @@ -2,25 +2,15 @@ import java.util.List; - -/** - * A human-readable record of upgrade progress + status. - */ +/** A human-readable record of upgrade progress + status. */ public interface UpgradeReport { - /** - * Adds a new line to the upgrade report. - */ + /** Adds a new line to the upgrade report. */ void addLine(String line); - /** - * Adds a new line to the upgrade report with exception - */ + /** Adds a new line to the upgrade report with exception */ void addLine(String line, Exception e); - /** - * Retrieves the lines in the report. - */ + /** Retrieves the lines in the report. */ List lines(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java index cdb94f0c0bba1a..25dc758575fd16 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeResult.java @@ -1,36 +1,21 @@ package com.linkedin.datahub.upgrade; -/** - * Represents the result of executing an {@link Upgrade} - */ +/** Represents the result of executing an {@link Upgrade} */ public interface UpgradeResult { - /** - * The execution result. - */ + /** The execution result. */ enum Result { - /** - * Upgrade succeeded. - */ + /** Upgrade succeeded. */ SUCCEEDED, - /** - * Upgrade failed. - */ + /** Upgrade failed. */ FAILED, - /** - * Upgrade was aborted. - */ + /** Upgrade was aborted. 
*/ ABORTED } - /** - * Returns the {@link Result} of executing an {@link Upgrade} - */ + /** Returns the {@link Result} of executing an {@link Upgrade} */ Result result(); - /** - * Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. - */ + /** Returns the {@link UpgradeReport} associated with the completed {@link Upgrade}. */ UpgradeReport report(); - } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java index b85bd7a51e3dd5..3f90dcb33a0052 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStep.java @@ -2,39 +2,29 @@ import java.util.function.Function; - -/** - * Represents a single executable step in an {@link Upgrade}. - */ +/** Represents a single executable step in an {@link Upgrade}. */ public interface UpgradeStep { - /** - * Returns an identifier for the upgrade step. - */ + /** Returns an identifier for the upgrade step. */ String id(); - /** - * Returns a function representing the step's execution logic. - */ + /** Returns a function representing the step's execution logic. */ Function executable(); - /** - * Returns the number of times the step should be retried. - */ + /** Returns the number of times the step should be retried. */ default int retryCount() { return 0; } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. 
*/ default boolean isOptional() { return false; } - /** - * Returns whether or not to skip the step based on the UpgradeContext - */ + /** Returns whether or not to skip the step based on the UpgradeContext */ default boolean skip(UpgradeContext context) { return false; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java index 60d51f9ba476cc..04b3d4b8559e67 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeStepResult.java @@ -2,52 +2,33 @@ public interface UpgradeStepResult { - /** - * Returns a string identifier associated with the step. - */ + /** Returns a string identifier associated with the step. */ String stepId(); - /** - * The outcome of the step execution. - */ + /** The outcome of the step execution. */ enum Result { - /** - * The step succeeded. - */ + /** The step succeeded. */ SUCCEEDED, - /** - * The step failed. - */ + /** The step failed. */ FAILED } - /** - * A control-flow action to perform as a result of the step execution. - */ + /** A control-flow action to perform as a result of the step execution. */ enum Action { - /** - * Continue attempting the upgrade. - */ + /** Continue attempting the upgrade. */ CONTINUE, - /** - * Immediately fail the upgrade, without retry. - */ + /** Immediately fail the upgrade, without retry. */ FAIL, - /** - * Immediately abort the upgrade, without retry. - */ + /** Immediately abort the upgrade, without retry. */ ABORT } - /** - * Returns the result of executing the step, either success or failure. - */ + /** Returns the result of executing the step, either success or failure. */ Result result(); - /** - * Returns the action to perform after executing the step, either continue or abort. 
- */ + /** Returns the action to perform after executing the step, either continue or abort. */ default Action action() { return Action.CONTINUE; - }; + } + ; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java index a6f3ef55604424..8d5f1118433fc0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/UpgradeUtils.java @@ -19,10 +19,12 @@ public static Map> parseArgs(final List args) { for (final String arg : args) { List parsedArg = Arrays.asList(arg.split(KEY_VALUE_DELIMITER, 2)); - parsedArgs.put(parsedArg.get(0), parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty()); + parsedArgs.put( + parsedArg.get(0), + parsedArg.size() > 1 ? Optional.of(parsedArg.get(1)) : Optional.empty()); } return parsedArgs; } - private UpgradeUtils() { } + private UpgradeUtils() {} } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java index 4f980b11b888a5..393b5411599adc 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearGraphServiceStep.java @@ -8,7 +8,6 @@ import com.linkedin.metadata.graph.GraphService; import java.util.function.Function; - public class ClearGraphServiceStep implements UpgradeStep { private final String deletePattern = ".*"; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java index fca8f60aefd95b..230f5a60cb9ff7 100644 --- 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/ClearSearchServiceStep.java @@ -8,13 +8,13 @@ import com.linkedin.metadata.search.EntitySearchService; import java.util.function.Function; - public class ClearSearchServiceStep implements UpgradeStep { private final EntitySearchService _entitySearchService; private final boolean _alwaysRun; - public ClearSearchServiceStep(final EntitySearchService entitySearchService, final boolean alwaysRun) { + public ClearSearchServiceStep( + final EntitySearchService entitySearchService, final boolean alwaysRun) { _entitySearchService = entitySearchService; _alwaysRun = alwaysRun; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java index 270aa11c7b0701..dd6c3fd1e44aa6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSDisableWriteModeStep.java @@ -8,7 +8,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; - @RequiredArgsConstructor public class GMSDisableWriteModeStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java index 8df02123983e8b..8a0d374d6ee3e6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSEnableWriteModeStep.java @@ -8,7 +8,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; - 
@RequiredArgsConstructor public class GMSEnableWriteModeStep implements UpgradeStep { private final SystemRestliEntityClient _entityClient; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java index 1391ef685c335d..4e7447cb1e2cb6 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/common/steps/GMSQualificationStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.common.steps; +import static com.linkedin.metadata.Constants.*; + import com.fasterxml.jackson.core.StreamReadConstraints; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -18,9 +20,6 @@ import java.util.function.Function; import lombok.RequiredArgsConstructor; -import static com.linkedin.metadata.Constants.*; - - @RequiredArgsConstructor public class GMSQualificationStep implements UpgradeStep { @@ -70,9 +69,16 @@ private boolean isEligible(ObjectNode configJson) { @Override public Function executable() { return (context) -> { - String gmsHost = System.getenv("DATAHUB_GMS_HOST") == null ? "localhost" : System.getenv("DATAHUB_GMS_HOST"); - String gmsPort = System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); - String gmsProtocol = System.getenv("DATAHUB_GMS_PROTOCOL") == null ? "http" : System.getenv("DATAHUB_GMS_PROTOCOL"); + String gmsHost = + System.getenv("DATAHUB_GMS_HOST") == null + ? "localhost" + : System.getenv("DATAHUB_GMS_HOST"); + String gmsPort = + System.getenv("DATAHUB_GMS_PORT") == null ? "8080" : System.getenv("DATAHUB_GMS_PORT"); + String gmsProtocol = + System.getenv("DATAHUB_GMS_PROTOCOL") == null + ? 
"http" + : System.getenv("DATAHUB_GMS_PROTOCOL"); try { String spec = String.format("%s://%s:%s/config", gmsProtocol, gmsHost, gmsPort); @@ -81,33 +87,37 @@ public Function executable() { String responseString = convertStreamToString(response); ObjectMapper mapper = new ObjectMapper(); - int maxSize = Integer.parseInt(System.getenv().getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, - MAX_JACKSON_STRING_SIZE)); - mapper.getFactory().setStreamReadConstraints(StreamReadConstraints.builder() - .maxStringLength(maxSize).build()); + int maxSize = + Integer.parseInt( + System.getenv() + .getOrDefault(INGESTION_MAX_SERIALIZED_STRING_LENGTH, MAX_JACKSON_STRING_SIZE)); + mapper + .getFactory() + .setStreamReadConstraints( + StreamReadConstraints.builder().maxStringLength(maxSize).build()); JsonNode configJson = mapper.readTree(responseString); if (isEligible((ObjectNode) configJson)) { - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } else { - context.report().addLine(String.format("Failed to qualify GMS. It is not running on the latest version." - + "Re-run GMS on the latest datahub release")); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "Failed to qualify GMS. It is not running on the latest version." + + "Re-run GMS on the latest datahub release")); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } catch (Exception e) { e.printStackTrace(); - context.report().addLine(String.format("ERROR: Cannot connect to GMS" - + "at %s://host %s port %s. 
Make sure GMS is on the latest version " - + "and is running at that host before starting the migration.", - gmsProtocol, - gmsHost, - gmsPort)); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + context + .report() + .addLine( + String.format( + "ERROR: Cannot connect to GMS" + + "at %s://host %s port %s. Make sure GMS is on the latest version " + + "and is running at that host before starting the migration.", + gmsProtocol, gmsHost, gmsPort)); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java index 16e5e4247267f1..abd144bf453ed8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BackfillBrowsePathsV2Config.java @@ -6,12 +6,12 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class BackfillBrowsePathsV2Config { @Bean - public BackfillBrowsePathsV2 backfillBrowsePathsV2(EntityService entityService, SearchService searchService) { + public BackfillBrowsePathsV2 backfillBrowsePathsV2( + EntityService entityService, SearchService searchService) { return new BackfillBrowsePathsV2(entityService, searchService); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java index e98f0dc2093f62..1e9298bc60612d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/BuildIndicesConfig.java @@ -10,16 +10,24 @@ import 
org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class BuildIndicesConfig { @Bean(name = "buildIndices") - public BuildIndices buildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public BuildIndices buildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new BuildIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new BuildIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java index 558c9780911ac5..5bd7244a92e45a 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/CleanIndicesConfig.java @@ -10,16 +10,24 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class CleanIndicesConfig { @Bean(name = "cleanIndices") - public CleanIndices cleanIndices(final 
SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + public CleanIndices cleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - return new CleanIndices(systemMetadataService, timeseriesAspectService, entitySearchService, graphService, - baseElasticSearchComponents, configurationProvider); + return new CleanIndices( + systemMetadataService, + timeseriesAspectService, + entitySearchService, + graphService, + baseElasticSearchComponents, + configurationProvider); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java index 23ea81009fa1da..5ba5c8a90fd4ac 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeCleanupConfig.java @@ -1,28 +1,35 @@ package com.linkedin.datahub.upgrade.config; +import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; + import com.linkedin.datahub.upgrade.nocodecleanup.NoCodeCleanupUpgrade; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.utils.elasticsearch.IndexConvention; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.opensearch.client.RestHighLevelClient; import 
org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; -import static com.linkedin.gms.factory.common.IndexConventionFactory.INDEX_CONVENTION_BEAN; - - +@Slf4j @Configuration public class NoCodeCleanupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeCleanup") - @DependsOn({"ebeanServer", "graphService", "elasticSearchRestHighLevelClient", INDEX_CONVENTION_BEAN}) + @DependsOn({ + "ebeanServer", + "graphService", + "elasticSearchRestHighLevelClient", + INDEX_CONVENTION_BEAN + }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeCleanupUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); @@ -31,4 +38,12 @@ public NoCodeCleanupUpgrade createInstance() { final IndexConvention indexConvention = applicationContext.getBean(IndexConvention.class); return new NoCodeCleanupUpgrade(ebeanServer, graphClient, searchClient, indexConvention); } + + @Bean(name = "noCodeCleanup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeCleanupUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeCleanupUpgrade(null, null, null, null); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java index cd264e529e9a57..d968e8521867e8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java +++ 
b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/NoCodeUpgradeConfig.java @@ -6,28 +6,39 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - +@Slf4j @Configuration public class NoCodeUpgradeConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "noCodeUpgrade") @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "entityRegistry"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public NoCodeUpgrade createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); return new NoCodeUpgrade(ebeanServer, entityService, entityRegistry, entityClient); } + + @Bean(name = "noCodeUpgrade") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public NoCodeUpgrade createNotImplInstance() { + log.warn("NoCode is not supported for cassandra!"); + return new NoCodeUpgrade(null, null, null, null); + } } diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java index cdc739efc416dd..0b46133209382b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RemoveUnknownAspectsConfig.java @@ -5,7 +5,6 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Configuration public class RemoveUnknownAspectsConfig { @Bean(name = "removeUnknownAspects") diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java index 97a08800534de8..116d62878f5c6e 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreBackupConfig.java @@ -8,31 +8,48 @@ import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - +@Slf4j @Configuration public class RestoreBackupConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreBackup") - @DependsOn({"ebeanServer", "entityService", "systemRestliEntityClient", "graphService", - "searchService", "entityRegistry"}) + @DependsOn({ + 
"ebeanServer", + "entityService", + "systemRestliEntityClient", + "graphService", + "searchService", + "entityRegistry" + }) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreBackup createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final SystemRestliEntityClient entityClient = applicationContext.getBean(SystemRestliEntityClient.class); + final SystemRestliEntityClient entityClient = + applicationContext.getBean(SystemRestliEntityClient.class); final GraphService graphClient = applicationContext.getBean(GraphService.class); final EntitySearchService searchClient = applicationContext.getBean(EntitySearchService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreBackup(ebeanServer, entityService, entityRegistry, entityClient, - graphClient, searchClient); + return new RestoreBackup( + ebeanServer, entityService, entityRegistry, entityClient, graphClient, searchClient); + } + + @Bean(name = "restoreBackup") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreBackup createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreBackup(null, null, null, null, null, null); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java index 663cad4a4bff63..9d229f315d709d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/RestoreIndicesConfig.java @@ -7,29 +7,40 @@ import com.linkedin.metadata.search.EntitySearchService; import io.ebean.Database; 
import javax.annotation.Nonnull; +import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.ApplicationContext; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.DependsOn; - +@Slf4j @Configuration public class RestoreIndicesConfig { - @Autowired - ApplicationContext applicationContext; + @Autowired ApplicationContext applicationContext; @Bean(name = "restoreIndices") @DependsOn({"ebeanServer", "entityService", "searchService", "graphService", "entityRegistry"}) + @ConditionalOnProperty(name = "entityService.impl", havingValue = "ebean", matchIfMissing = true) @Nonnull public RestoreIndices createInstance() { final Database ebeanServer = applicationContext.getBean(Database.class); final EntityService entityService = applicationContext.getBean(EntityService.class); - final EntitySearchService entitySearchService = applicationContext.getBean(EntitySearchService.class); + final EntitySearchService entitySearchService = + applicationContext.getBean(EntitySearchService.class); final GraphService graphService = applicationContext.getBean(GraphService.class); final EntityRegistry entityRegistry = applicationContext.getBean(EntityRegistry.class); - return new RestoreIndices(ebeanServer, entityService, entityRegistry, entitySearchService, - graphService); + return new RestoreIndices( + ebeanServer, entityService, entityRegistry, entitySearchService, graphService); + } + + @Bean(name = "restoreIndices") + @ConditionalOnProperty(name = "entityService.impl", havingValue = "cassandra") + @Nonnull + public RestoreIndices createNotImplInstance() { + log.warn("restoreIndices is not supported for cassandra!"); + return new RestoreIndices(null, null, null, null, null); } } diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java index 9848fc7a0008f7..3b63d81486eb4b 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/config/SystemUpdateConfig.java @@ -24,18 +24,21 @@ import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; - @Slf4j @Configuration public class SystemUpdateConfig { @Bean(name = "systemUpdate") - public SystemUpdate systemUpdate(final BuildIndices buildIndices, final CleanIndices cleanIndices, - @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, - final GitVersion gitVersion, @Qualifier("revision") String revision, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + public SystemUpdate systemUpdate( + final BuildIndices buildIndices, + final CleanIndices cleanIndices, + @Qualifier("duheKafkaEventProducer") final KafkaEventProducer kafkaEventProducer, + final GitVersion gitVersion, + @Qualifier("revision") String revision, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { String version = String.format("%s-%s", gitVersion.getVersion(), revision); - return new SystemUpdate(buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); + return new SystemUpdate( + buildIndices, cleanIndices, kafkaEventProducer, version, backfillBrowsePathsV2); } @Value("#{systemEnvironment['DATAHUB_REVISION'] ?: '0'}") @@ -50,16 +53,18 @@ public String getRevision() { @Qualifier(TopicConventionFactory.TOPIC_CONVENTION_BEAN) private TopicConvention topicConvention; - @Autowired - private KafkaHealthChecker kafkaHealthChecker; + @Autowired private KafkaHealthChecker kafkaHealthChecker; @Bean(name = "duheKafkaEventProducer") - protected KafkaEventProducer 
duheKafkaEventProducer(@Qualifier("configurationProvider") ConfigurationProvider provider, - KafkaProperties properties, - @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { + protected KafkaEventProducer duheKafkaEventProducer( + @Qualifier("configurationProvider") ConfigurationProvider provider, + KafkaProperties properties, + @Qualifier("duheSchemaRegistryConfig") SchemaRegistryConfig duheSchemaRegistryConfig) { KafkaConfiguration kafkaConfiguration = provider.getKafka(); - Producer producer = new KafkaProducer<>( - DataHubKafkaProducerFactory.buildProducerProperties(duheSchemaRegistryConfig, kafkaConfiguration, properties)); + Producer producer = + new KafkaProducer<>( + DataHubKafkaProducerFactory.buildProducerProperties( + duheSchemaRegistryConfig, kafkaConfiguration, properties)); return new KafkaEventProducer(producer, topicConvention, kafkaHealthChecker); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java index 972b55f2001f12..6cc94fbed5bf31 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeContext.java @@ -9,7 +9,6 @@ import java.util.Map; import java.util.Optional; - public class DefaultUpgradeContext implements UpgradeContext { private final Upgrade _upgrade; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java index a642ee3fb0a90a..623c8a71e861d8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeManager.java @@ -17,7 +17,6 @@ import java.util.Map; import 
javax.annotation.Nonnull; - public class DefaultUpgradeManager implements UpgradeManager { private final Map _upgrades = new HashMap<>(); @@ -32,16 +31,19 @@ public UpgradeResult execute(String upgradeId, List args) { if (_upgrades.containsKey(upgradeId)) { return executeInternal(_upgrades.get(upgradeId), args); } - throw new IllegalArgumentException(String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); + throw new IllegalArgumentException( + String.format("No upgrade with id %s could be found. Aborting...", upgradeId)); } private UpgradeResult executeInternal(Upgrade upgrade, List args) { final UpgradeReport upgradeReport = new DefaultUpgradeReport(); - final UpgradeContext context = new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); + final UpgradeContext context = + new DefaultUpgradeContext(upgrade, upgradeReport, new ArrayList<>(), args); upgradeReport.addLine(String.format("Starting upgrade with id %s...", upgrade.id())); UpgradeResult result = executeInternal(context); upgradeReport.addLine( - String.format("Upgrade %s completed with result %s. Exiting...", upgrade.id(), result.result())); + String.format( + "Upgrade %s completed with result %s. 
Exiting...", upgrade.id(), result.result())); executeCleanupInternal(context, result); return result; } @@ -58,12 +60,16 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (step.skip(context)) { upgradeReport.addLine( - String.format(String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Skipping Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); continue; } upgradeReport.addLine( - String.format(String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), upgrade.id())); + String.format( + String.format("Executing Step %s/%s: %s...", i + 1, steps.size(), step.id()), + upgrade.id())); final UpgradeStepResult stepResult = executeStepInternal(context, step); stepResults.add(stepResult); @@ -71,7 +77,8 @@ private UpgradeResult executeInternal(UpgradeContext context) { // Apply Actions if (UpgradeStepResult.Action.ABORT.equals(stepResult.action())) { upgradeReport.addLine( - String.format("Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", + String.format( + "Step with id %s requested an abort of the in-progress update. Aborting the upgrade...", step.id())); return new DefaultUpgradeResult(UpgradeResult.Result.ABORTED, upgradeReport); } @@ -80,23 +87,27 @@ private UpgradeResult executeInternal(UpgradeContext context) { if (UpgradeStepResult.Result.FAILED.equals(stepResult.result())) { if (step.isOptional()) { upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", i + 1, - steps.size(), step.id())); + String.format( + "Failed Step %s/%s: %s. Step marked as optional. Proceeding with upgrade...", + i + 1, steps.size(), step.id())); continue; } // Required step failed. Fail the entire upgrade process. upgradeReport.addLine( - String.format("Failed Step %s/%s: %s. 
Failed after %s retries.", i + 1, steps.size(), step.id(), - step.retryCount())); + String.format( + "Failed Step %s/%s: %s. Failed after %s retries.", + i + 1, steps.size(), step.id(), step.retryCount())); upgradeReport.addLine(String.format("Exiting upgrade %s with failure.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.FAILED, upgradeReport); } - upgradeReport.addLine(String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); + upgradeReport.addLine( + String.format("Completed Step %s/%s: %s successfully.", i + 1, steps.size(), step.id())); } - upgradeReport.addLine(String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); + upgradeReport.addLine( + String.format("Success! Completed upgrade with id %s successfully.", upgrade.id())); return new DefaultUpgradeResult(UpgradeResult.Result.SUCCEEDED, upgradeReport); } @@ -105,15 +116,19 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte UpgradeStepResult result = null; int maxAttempts = retryCount + 1; for (int i = 0; i < maxAttempts; i++) { - try (Timer.Context completionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { - try (Timer.Context executionTimer = MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { + try (Timer.Context completionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "completionTime")).time()) { + try (Timer.Context executionTimer = + MetricUtils.timer(MetricRegistry.name(step.id(), "executionTime")).time()) { result = step.executable().apply(context); } if (result == null) { // Failed to even retrieve a result. Create a default failure result. 
result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); - context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); + context + .report() + .addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); MetricUtils.counter(MetricRegistry.name(step.id(), "retry")).inc(); } @@ -122,9 +137,11 @@ private UpgradeStepResult executeStepInternal(UpgradeContext context, UpgradeSte break; } } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); + String.format( + "Caught exception during attempt %s of Step with id %s: %s", i, step.id(), e)); MetricUtils.counter(MetricRegistry.name(step.id(), "failed")).inc(); result = new DefaultUpgradeStepResult(step.id(), UpgradeStepResult.Result.FAILED); context.report().addLine(String.format("Retrying %s more times...", maxAttempts - (i + 1))); @@ -139,7 +156,11 @@ private void executeCleanupInternal(UpgradeContext context, UpgradeResult result try { step.executable().accept(context, result); } catch (Exception e) { - context.report().addLine(String.format("Caught exception while executing cleanup step with id %s", step.id())); + context + .report() + .addLine( + String.format( + "Caught exception while executing cleanup step with id %s", step.id())); } } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java index 19706937e20ca8..913b0ff20e6ff9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeReport.java @@ -1,10 +1,9 @@ package com.linkedin.datahub.upgrade.impl; import com.linkedin.datahub.upgrade.UpgradeReport; -import lombok.extern.slf4j.Slf4j; import 
java.util.ArrayList; import java.util.List; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class DefaultUpgradeReport implements UpgradeReport { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java index 6ecb5228482917..cf0e7221b406b0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeResult.java @@ -3,7 +3,6 @@ import com.linkedin.datahub.upgrade.UpgradeReport; import com.linkedin.datahub.upgrade.UpgradeResult; - public class DefaultUpgradeResult implements UpgradeResult { private final Result _result; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java index d0c086f607edd2..e11eaf89bfc8d2 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/impl/DefaultUpgradeStepResult.java @@ -2,7 +2,6 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; - public class DefaultUpgradeStepResult implements UpgradeStepResult { private final String _stepId; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java index 7ed7169bf20bcc..3b3098f43c4734 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/CreateAspectTableStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import 
com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import io.ebean.Database; import java.util.function.Function; @@ -36,40 +36,42 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - - DbType targetDbType = context.parsedArgs().containsKey(DB_TYPE_ARG) - ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) - : DbType.MYSQL; + DbType targetDbType = + context.parsedArgs().containsKey(DB_TYPE_ARG) + ? DbType.valueOf(context.parsedArgs().get(DB_TYPE_ARG).get()) + : DbType.MYSQL; String sqlUpdateStr; switch (targetDbType) { case POSTGRES: - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint not null,\n" - + " metadata text not null,\n" - + " systemmetadata text,\n" - + " createdon timestamp not null,\n" - + " createdby varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint not null,\n" + + " metadata text not null,\n" + + " systemmetadata text,\n" + + " createdon timestamp not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; default: // both mysql and maria - sqlUpdateStr = "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" - + " urn varchar(500) not null,\n" - + " aspect varchar(200) not null,\n" - + " version bigint(20) not null,\n" - + " metadata longtext not null,\n" - + " systemmetadata longtext,\n" - + " createdon datetime(6) not null,\n" - + " createdby 
varchar(255) not null,\n" - + " createdfor varchar(255),\n" - + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" - + ")"; + sqlUpdateStr = + "CREATE TABLE IF NOT EXISTS metadata_aspect_v2 (\n" + + " urn varchar(500) not null,\n" + + " aspect varchar(200) not null,\n" + + " version bigint(20) not null,\n" + + " metadata longtext not null,\n" + + " systemmetadata longtext,\n" + + " createdon datetime(6) not null,\n" + + " createdby varchar(255) not null,\n" + + " createdfor varchar(255),\n" + + " constraint pk_metadata_aspect_v2 primary key (urn,aspect,version)\n" + + ")"; break; } @@ -77,9 +79,7 @@ public Function executable() { _server.execute(_server.createSqlUpdate(sqlUpdateStr)); } catch (Exception e) { context.report().addLine("Failed to create table metadata_aspect_v2", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java index 1b5770a11ff62c..ac56e5e91c72be 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/DataMigrationStep.java @@ -1,22 +1,22 @@ package com.linkedin.datahub.upgrade.nocode; +import com.datahub.util.RecordUtils; import com.linkedin.common.AuditStamp; import com.linkedin.common.BrowsePaths; import com.linkedin.common.urn.Urn; import com.linkedin.data.template.RecordTemplate; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import 
com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.metadata.models.AspectSpec; -import com.linkedin.metadata.models.registry.EntityRegistry; -import com.linkedin.metadata.utils.PegasusUtils; -import com.datahub.util.RecordUtils; import com.linkedin.metadata.entity.ebean.EbeanAspectV1; import com.linkedin.metadata.entity.ebean.EbeanAspectV2; +import com.linkedin.metadata.models.AspectSpec; import com.linkedin.metadata.models.EntitySpec; +import com.linkedin.metadata.models.registry.EntityRegistry; +import com.linkedin.metadata.utils.PegasusUtils; import com.linkedin.util.Pair; import io.ebean.Database; import io.ebean.PagedList; @@ -29,13 +29,13 @@ import java.util.concurrent.TimeUnit; import java.util.function.Function; - public class DataMigrationStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; private static final long DEFAULT_BATCH_DELAY_MS = 250; - private static final String BROWSE_PATHS_ASPECT_NAME = PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); + private static final String BROWSE_PATHS_ASPECT_NAME = + PegasusUtils.getAspectNameFromSchema(new BrowsePaths().schema()); private final Database _server; private final EntityService _entityService; @@ -64,7 +64,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - context.report().addLine("Starting data migration..."); final int rowCount = _server.find(EbeanAspectV1.class).findCount(); context.report().addLine(String.format("Found %s rows in legacy aspects table", rowCount)); @@ -74,7 +73,11 @@ public Function executable() { int count = getBatchSize(context.parsedArgs()); while (start < rowCount) { - context.report().addLine(String.format("Reading rows %s through %s from legacy aspects table.", start, start + count)); + context + .report() + .addLine( + String.format( + "Reading rows %s through %s from 
legacy aspects table.", start, start + count)); PagedList rows = getPagedAspects(start, count); for (EbeanAspectV1 oldAspect : rows.getList()) { @@ -84,11 +87,18 @@ public Function executable() { // 1. Instantiate the RecordTemplate class associated with the aspect. final RecordTemplate aspectRecord; try { - aspectRecord = RecordUtils.toRecordTemplate( - Class.forName(oldAspectName).asSubclass(RecordTemplate.class), - oldAspect.getMetadata()); + aspectRecord = + RecordUtils.toRecordTemplate( + Class.forName(oldAspectName).asSubclass(RecordTemplate.class), + oldAspect.getMetadata()); } catch (Exception e) { - context.report().addLine(String.format("Failed to convert aspect with name %s into a RecordTemplate class", oldAspectName), e); + context + .report() + .addLine( + String.format( + "Failed to convert aspect with name %s into a RecordTemplate class", + oldAspectName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -97,7 +107,11 @@ public Function executable() { try { urn = Urn.createFromString(oldAspect.getKey().getUrn()); } catch (Exception e) { - throw new RuntimeException(String.format("Failed to bind Urn with value %s into Urn object", oldAspect.getKey().getUrn()), e); + throw new RuntimeException( + String.format( + "Failed to bind Urn with value %s into Urn object", + oldAspect.getKey().getUrn()), + e); } // 3. Verify that the entity associated with the aspect is found in the registry. 
@@ -106,7 +120,12 @@ public Function executable() { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find Entity with name %s in Entity Registry", entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -115,9 +134,13 @@ public Function executable() { try { newAspectName = PegasusUtils.getAspectNameFromSchema(aspectRecord.schema()); } catch (Exception e) { - context.report().addLine(String.format("Failed to retrieve @Aspect name from schema %s, urn %s", - aspectRecord.schema().getFullName(), - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to retrieve @Aspect name from schema %s, urn %s", + aspectRecord.schema().getFullName(), entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -126,23 +149,24 @@ public Function executable() { try { aspectSpec = entitySpec.getAspectSpec(newAspectName); } catch (Exception e) { - context.report().addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - newAspectName, - entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + newAspectName, entityName), + e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } // 6. Write the row back using the EntityService boolean emitMae = oldAspect.getKey().getVersion() == 0L; _entityService.ingestAspects( - urn, - List.of(Pair.of(newAspectName, aspectRecord)), - toAuditStamp(oldAspect), - null - ); + urn, List.of(Pair.of(newAspectName, aspectRecord)), toAuditStamp(oldAspect), null); // 7. If necessary, emit a browse path aspect. 
- if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) && !urnsWithBrowsePath.contains(urn)) { + if (entitySpec.getAspectSpecMap().containsKey(BROWSE_PATHS_ASPECT_NAME) + && !urnsWithBrowsePath.contains(urn)) { // Emit a browse path aspect. final BrowsePaths browsePaths; try { @@ -152,7 +176,11 @@ public Function executable() { browsePathsStamp.setActor(Urn.createFromString(Constants.SYSTEM_ACTOR)); browsePathsStamp.setTime(System.currentTimeMillis()); - _entityService.ingestAspects(urn, List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), browsePathsStamp, null); + _entityService.ingestAspects( + urn, + List.of(Pair.of(BROWSE_PATHS_ASPECT_NAME, browsePaths)), + browsePathsStamp, + null); urnsWithBrowsePath.add(urn); } catch (URISyntaxException e) { @@ -167,13 +195,17 @@ public Function executable() { try { TimeUnit.MILLISECONDS.sleep(getBatchDelayMs(context.parsedArgs())); } catch (InterruptedException e) { - throw new RuntimeException("Thread interrupted while sleeping after successful batch migration."); + throw new RuntimeException( + "Thread interrupted while sleeping after successful batch migration."); } } if (totalRowsMigrated != rowCount) { - context.report().addLine(String.format("Number of rows migrated %s does not equal the number of input rows %s...", - totalRowsMigrated, - rowCount)); + context + .report() + .addLine( + String.format( + "Number of rows migrated %s does not equal the number of input rows %s...", + totalRowsMigrated, rowCount)); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -195,9 +227,9 @@ private AuditStamp toAuditStamp(final EbeanAspectV1 aspect) { return auditStamp; } - private PagedList getPagedAspects(final int start, final int pageSize) { - return _server.find(EbeanAspectV1.class) + return _server + .find(EbeanAspectV1.class) .select(EbeanAspectV1.ALL_COLUMNS) .setFirstRow(start) 
.setMaxRows(pageSize) @@ -219,7 +251,8 @@ private long getBatchDelayMs(final Map> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (parsedArgs.containsKey(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME) && parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).isPresent()) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(NoCodeUpgrade.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java index a299deb8747212..674efb2b8ba78c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/NoCodeUpgrade.java @@ -13,6 +13,7 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; public class NoCodeUpgrade implements Upgrade { @@ -26,15 +27,17 @@ public class NoCodeUpgrade implements Upgrade { // Upgrade requires the Database. 
public NoCodeUpgrade( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient) { - _steps = buildUpgradeSteps( - server, entityService, - entityRegistry, - entityClient); - _cleanupSteps = buildCleanupSteps(); + if (server != null) { + _steps = buildUpgradeSteps(server, entityService, entityRegistry, entityClient); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java index cf8e848762f143..6180573d902d22 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/RemoveAspectV2TableStep.java @@ -7,10 +7,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. 
*/ public class RemoveAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java index 0fe9afa8cc6f83..d22af9d2924003 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocode/UpgradeQualificationStep.java @@ -1,9 +1,9 @@ package com.linkedin.datahub.upgrade.nocode; -import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; +import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.entity.ebean.AspectStorageValidationUtil; import io.ebean.Database; import java.util.function.Function; @@ -29,7 +29,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - if (context.parsedArgs().containsKey(NoCodeUpgrade.FORCE_UPGRADE_ARG_NAME)) { context.report().addLine("Forced upgrade detected. Proceeding with upgrade..."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); @@ -43,7 +42,8 @@ public Function executable() { } // Unqualified (Table already exists) context.report().addLine("Failed to qualify upgrade candidate. 
Aborting the upgrade..."); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); + return new DefaultUpgradeStepResult( + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists", e); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); @@ -67,8 +67,13 @@ private boolean isQualified(Database server, UpgradeContext context) { return true; } context.report().addLine(String.format("-- V2 table has %d rows", v2TableRowCount)); - context.report().addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); - context.report().addLine("-- If V2 table has significantly less rows, consider running the forced upgrade. "); + context + .report() + .addLine("-- Since V2 table has records, we will not proceed with the upgrade. "); + context + .report() + .addLine( + "-- If V2 table has significantly less rows, consider running the forced upgrade. "); return false; } context.report().addLine("-- V2 table does not exist"); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java index 8005e31e01c671..ba0a0124545e9d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteAspectTableStep.java @@ -7,7 +7,6 @@ import io.ebean.Database; import java.util.function.Function; - // Do we need SQL-tech specific migration paths? 
public class DeleteAspectTableStep implements UpgradeStep { @@ -34,9 +33,7 @@ public Function executable() { _server.execute(_server.sqlUpdate("DROP TABLE IF EXISTS metadata_aspect;")); } catch (Exception e) { context.report().addLine("Failed to delete data from legacy table metadata_aspect", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java index 12ff125a05127c..5066e05f8bf5a1 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacyGraphRelationshipsStep.java @@ -6,10 +6,8 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.graph.GraphService; import com.linkedin.metadata.graph.neo4j.Neo4jGraphService; -import lombok.extern.slf4j.Slf4j; - import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; // Do we need SQL-tech specific migration paths? 
@Slf4j @@ -44,9 +42,7 @@ public Function executable() { } } catch (Exception e) { context.report().addLine("Failed to delete legacy data from graph", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java index 9a64d5fe1810c9..05656373377b93 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/DeleteLegacySearchIndicesStep.java @@ -11,7 +11,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; - // Do we need SQL-tech specific migration paths? 
@RequiredArgsConstructor public class DeleteLegacySearchIndicesStep implements UpgradeStep { @@ -20,7 +19,8 @@ public class DeleteLegacySearchIndicesStep implements UpgradeStep { private final RestHighLevelClient _searchClient; - public DeleteLegacySearchIndicesStep(final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + public DeleteLegacySearchIndicesStep( + final RestHighLevelClient searchClient, final IndexConvention indexConvention) { _searchClient = searchClient; deletePattern = indexConvention.getPrefix().map(p -> p + "_").orElse("") + "*document*"; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java index a5d8d6ce9b666f..6d3125423b4433 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeCleanupUpgrade.java @@ -9,19 +9,27 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; +import javax.annotation.Nullable; import org.opensearch.client.RestHighLevelClient; - public class NoCodeCleanupUpgrade implements Upgrade { private final List _steps; private final List _cleanupSteps; // Upgrade requires the Database. 
- public NoCodeCleanupUpgrade(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { - _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); - _cleanupSteps = buildCleanupSteps(); + public NoCodeCleanupUpgrade( + @Nullable final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { + if (server != null) { + _steps = buildUpgradeSteps(server, graphClient, searchClient, indexConvention); + _cleanupSteps = buildCleanupSteps(); + } else { + _steps = List.of(); + _cleanupSteps = List.of(); + } } @Override @@ -43,8 +51,11 @@ private List buildCleanupSteps() { return Collections.emptyList(); } - private List buildUpgradeSteps(final Database server, final GraphService graphClient, - final RestHighLevelClient searchClient, final IndexConvention indexConvention) { + private List buildUpgradeSteps( + final Database server, + final GraphService graphClient, + final RestHighLevelClient searchClient, + final IndexConvention indexConvention) { final List steps = new ArrayList<>(); steps.add(new NoCodeUpgradeQualificationStep(server)); steps.add(new DeleteAspectTableStep(server)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java index 67a226f8f0676c..15c7584532e2ca 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/nocodecleanup/NoCodeUpgradeQualificationStep.java @@ -8,7 +8,6 @@ import io.ebean.Database; import java.util.function.Function; - public class NoCodeUpgradeQualificationStep implements UpgradeStep { private final Database _server; @@ -33,23 +32,19 @@ public Function 
executable() { try { if (!AspectStorageValidationUtil.checkV2TableExists(_server)) { // Unqualified (V2 Table does not exist) - context.report().addLine("You have not successfully migrated yet. Aborting the cleanup..."); + context + .report() + .addLine("You have not successfully migrated yet. Aborting the cleanup..."); return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED, - UpgradeStepResult.Action.ABORT); + id(), UpgradeStepResult.Result.SUCCEEDED, UpgradeStepResult.Action.ABORT); } else { // Qualified. context.report().addLine("Found qualified upgrade candidate. Proceeding with upgrade..."); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.SUCCEEDED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); } } catch (Exception e) { context.report().addLine("Failed to check if metadata_aspect_v2 table exists: %s", e); - return new DefaultUpgradeStepResult( - id(), - UpgradeStepResult.Result.FAILED); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } }; } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java index b55d439745e691..7e55dcddc639f4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveClientIdAspectStep.java @@ -11,7 +11,6 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class RemoveClientIdAspectStep implements UpgradeStep { @@ -33,9 +32,10 @@ public boolean skip(UpgradeContext context) { @Override public Function executable() { return upgradeContext -> { - _entityService.deleteAspect(TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, - 
new HashMap<>(), true); - return (UpgradeStepResult) new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + _entityService.deleteAspect( + TelemetryUtils.CLIENT_ID_URN, INVALID_CLIENT_ID_ASPECT, new HashMap<>(), true); + return (UpgradeStepResult) + new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java index f8af69dba08653..dc95b7605ef88f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/removeunknownaspects/RemoveUnknownAspects.java @@ -8,7 +8,6 @@ import java.util.ArrayList; import java.util.List; - public class RemoveUnknownAspects implements Upgrade { private final List _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java index 0303739e62afec..addf6dcb89c1ae 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/ClearAspectV2TableStep.java @@ -8,10 +8,7 @@ import io.ebean.Database; import java.util.function.Function; - -/** - * Optional step for removing Aspect V2 table. - */ +/** Optional step for removing Aspect V2 table. 
*/ public class ClearAspectV2TableStep implements UpgradeStep { private final Database _server; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java index 9175ad606e3c87..4ac295b4fdfb75 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreBackup.java @@ -16,20 +16,26 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; - +import javax.annotation.Nullable; public class RestoreBackup implements Upgrade { private final List _steps; public RestoreBackup( - final Database server, + @Nullable final Database server, final EntityService entityService, final EntityRegistry entityRegistry, final SystemRestliEntityClient entityClient, final GraphService graphClient, final EntitySearchService searchClient) { - _steps = buildSteps(server, entityService, entityRegistry, entityClient, graphClient, searchClient); + if (server != null) { + _steps = + buildSteps( + server, entityService, entityRegistry, entityClient, graphClient, searchClient); + } else { + _steps = List.of(); + } } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java index 42f7f0073e59b5..5c4567c856d0ed 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/RestoreStorageStep.java @@ -21,7 +21,6 @@ import com.linkedin.metadata.models.EntitySpec; import com.linkedin.metadata.models.registry.EntityRegistry; import com.linkedin.util.Pair; - import java.lang.reflect.InvocationTargetException; import 
java.net.URISyntaxException; import java.util.ArrayList; @@ -35,7 +34,6 @@ import java.util.function.Function; import java.util.stream.Collectors; - public class RestoreStorageStep implements UpgradeStep { private static final int REPORT_BATCH_SIZE = 1000; @@ -43,11 +41,13 @@ public class RestoreStorageStep implements UpgradeStep { private final EntityService _entityService; private final EntityRegistry _entityRegistry; - private final Map>>> _backupReaders; + private final Map>>> + _backupReaders; private final ExecutorService _fileReaderThreadPool; private final ExecutorService _gmsThreadPool; - public RestoreStorageStep(final EntityService entityService, final EntityRegistry entityRegistry) { + public RestoreStorageStep( + final EntityService entityService, final EntityRegistry entityRegistry) { _entityService = entityService; _entityRegistry = entityRegistry; _backupReaders = ImmutableBiMap.of(LocalParquetReader.READER_NAME, LocalParquetReader.class); @@ -82,7 +82,6 @@ public int retryCount() { @Override public Function executable() { return (context) -> { - context.report().addLine("Starting backup restore..."); int numRows = 0; Optional backupReaderName = context.parsedArgs().get("BACKUP_READER"); @@ -93,19 +92,32 @@ public Function executable() { return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } - Class> clazz = _backupReaders.get(backupReaderName.get()); + Class> clazz = + _backupReaders.get(backupReaderName.get()); List argNames = BackupReaderArgs.getArgNames(clazz); - List> args = argNames.stream().map(argName -> context.parsedArgs().get(argName)).collect( - Collectors.toList()); + List> args = + argNames.stream() + .map(argName -> context.parsedArgs().get(argName)) + .collect(Collectors.toList()); BackupReader backupReader; try { backupReader = clazz.getConstructor(List.class).newInstance(args); - } catch (InstantiationException | InvocationTargetException | IllegalAccessException | NoSuchMethodException e) { + } catch 
(InstantiationException + | InvocationTargetException + | IllegalAccessException + | NoSuchMethodException e) { e.printStackTrace(); - context.report().addLine("Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); - throw new IllegalArgumentException("Invalid BackupReader: " + clazz.getSimpleName() + ", need to implement proper constructor."); + context + .report() + .addLine( + "Invalid BackupReader, not able to construct instance of " + clazz.getSimpleName()); + throw new IllegalArgumentException( + "Invalid BackupReader: " + + clazz.getSimpleName() + + ", need to implement proper constructor."); } - EbeanAspectBackupIterator iterator = backupReader.getBackupIterator(context); + EbeanAspectBackupIterator iterator = + backupReader.getBackupIterator(context); ReaderWrapper reader; List> futureList = new ArrayList<>(); while ((reader = iterator.getNextReader()) != null) { @@ -138,9 +150,12 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { urn = Urn.createFromString(aspect.getKey().getUrn()); } catch (Exception e) { - context.report() + context + .report() .addLine( - String.format("Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), e); + String.format( + "Failed to bind Urn with value %s into Urn object", aspect.getKey().getUrn()), + e); continue; } @@ -150,8 +165,11 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { entitySpec = _entityRegistry.getEntitySpec(entityName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find Entity with name %s in Entity Registry", entityName), e); + context + .report() + .addLine( + String.format("Failed to find Entity with name %s in Entity Registry", entityName), + e); continue; } final String aspectName = aspect.getKey().getAspect(); @@ -160,11 +178,16 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { final RecordTemplate aspectRecord; try 
{ aspectRecord = - EntityUtils.toAspectRecord(entityName, aspectName, aspect.getMetadata(), _entityRegistry); + EntityUtils.toAspectRecord( + entityName, aspectName, aspect.getMetadata(), _entityRegistry); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to create aspect record with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to create aspect record with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } @@ -173,17 +196,27 @@ private void readerExecutable(ReaderWrapper reader, UpgradeContext context) { try { aspectSpec = entitySpec.getAspectSpec(aspectName); } catch (Exception e) { - context.report() - .addLine(String.format("Failed to find aspect spec with name %s associated with entity named %s", - aspectName, entityName), e); + context + .report() + .addLine( + String.format( + "Failed to find aspect spec with name %s associated with entity named %s", + aspectName, entityName), + e); continue; } // 5. 
Write the row back using the EntityService final long version = aspect.getKey().getVersion(); final AuditStamp auditStamp = toAuditStamp(aspect); - futureList.add(_gmsThreadPool.submit(() -> - _entityService.ingestAspects(urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null).get(0).getNewValue())); + futureList.add( + _gmsThreadPool.submit( + () -> + _entityService + .ingestAspects( + urn, List.of(Pair.of(aspectName, aspectRecord)), auditStamp, null) + .get(0) + .getNewValue())); if (numRows % REPORT_BATCH_SIZE == 0) { for (Future future : futureList) { try { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java index 7ea1811adfdd81..212f0da9f592d0 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReader.java @@ -3,10 +3,10 @@ import com.linkedin.datahub.upgrade.UpgradeContext; import javax.annotation.Nonnull; - /** - * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 object to be - * ingested back into GMS. Must have a constructor that takes a List of Optional Strings + * Base interface for BackupReader used for creating the BackupIterator to retrieve EbeanAspectV2 + * object to be ingested back into GMS. 
Must have a constructor that takes a List of Optional + * Strings */ public interface BackupReader { String getName(); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java index 20f43b5414ddd7..6176d56fbec958 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/BackupReaderArgs.java @@ -4,14 +4,9 @@ import java.util.List; import java.util.Map; - -/** - * Retains a map of what arguments are passed in to a backup reader - */ +/** Retains a map of what arguments are passed in to a backup reader */ public final class BackupReaderArgs { - private BackupReaderArgs() { - - } + private BackupReaderArgs() {} private static final Map, List> ARGS_MAP; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java index 3a2505311e2450..cce5928277a20d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/EbeanAspectBackupIterator.java @@ -7,10 +7,9 @@ import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - /** - * Base interface for iterators that retrieves EbeanAspectV2 objects - * This allows us to restore from backups of various format + * Base interface for iterators that retrieves EbeanAspectV2 objects This allows us to restore from + * backups of various format */ @Slf4j @RequiredArgsConstructor @@ -35,12 +34,13 @@ public T getNextReader() { @Override public void close() { - 
_readers.forEach(reader -> { - try { - reader.close(); - } catch (IOException e) { - log.error("Error while closing parquet reader", e); - } - }); + _readers.forEach( + reader -> { + try { + reader.close(); + } catch (IOException e) { + log.error("Error while closing parquet reader", e); + } + }); } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java index 9b8a3133ac04cb..9f0f81f466cfa4 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/LocalParquetReader.java @@ -14,10 +14,7 @@ import org.apache.parquet.avro.AvroParquetReader; import org.apache.parquet.hadoop.ParquetReader; - -/** - * BackupReader for retrieving EbeanAspectV2 objects from a local parquet file - */ +/** BackupReader for retrieving EbeanAspectV2 objects from a local parquet file */ @Slf4j public class LocalParquetReader implements BackupReader { @@ -46,16 +43,20 @@ public String getName() { public EbeanAspectBackupIterator getBackupIterator(UpgradeContext context) { Optional path = context.parsedArgs().get("BACKUP_FILE_PATH"); if (!path.isPresent()) { - context.report().addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); + context + .report() + .addLine("BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); throw new IllegalArgumentException( "BACKUP_FILE_PATH must be set to run RestoreBackup through local parquet file"); } try { - ParquetReader reader = AvroParquetReader.builder(new Path(path.get())).build(); - return new EbeanAspectBackupIterator<>(ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); + ParquetReader reader = + AvroParquetReader.builder(new 
Path(path.get())).build(); + return new EbeanAspectBackupIterator<>( + ImmutableList.of(new ParquetReaderWrapper(reader, path.get()))); } catch (IOException e) { throw new RuntimeException(String.format("Failed to build ParquetReader: %s", e)); } } -} \ No newline at end of file +} diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java index 2b7cacff652495..01c502221f77f9 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ParquetReaderWrapper.java @@ -15,9 +15,9 @@ @Slf4j public class ParquetReaderWrapper extends ReaderWrapper { - private final static long NANOS_PER_MILLISECOND = 1000000; - private final static long MILLIS_IN_DAY = 86400000; - private final static long JULIAN_EPOCH_OFFSET_DAYS = 2440588; + private static final long NANOS_PER_MILLISECOND = 1000000; + private static final long MILLIS_IN_DAY = 86400000; + private static final long JULIAN_EPOCH_OFFSET_DAYS = 2440588; private final ParquetReader _parquetReader; @@ -45,22 +45,30 @@ EbeanAspectV2 convertRecord(GenericRecord record) { ts = (Long) record.get("createdon"); } - return new EbeanAspectV2(record.get("urn").toString(), record.get("aspect").toString(), - (Long) record.get("version"), record.get("metadata").toString(), - Timestamp.from(Instant.ofEpochMilli(ts / 1000)), record.get("createdby").toString(), + return new EbeanAspectV2( + record.get("urn").toString(), + record.get("aspect").toString(), + (Long) record.get("version"), + record.get("metadata").toString(), + Timestamp.from(Instant.ofEpochMilli(ts / 1000)), + record.get("createdby").toString(), Optional.ofNullable(record.get("createdfor")).map(Object::toString).orElse(null), 
Optional.ofNullable(record.get("systemmetadata")).map(Object::toString).orElse(null)); } private long convertFixed96IntToTs(GenericFixed createdon) { // From https://github.com/apache/parquet-format/pull/49/filesParquetTimestampUtils.java - // and ParquetTimestampUtils.java from https://github.com/kube-reporting/presto/blob/master/presto-parquet/ + // and ParquetTimestampUtils.java from + // https://github.com/kube-reporting/presto/blob/master/presto-parquet/ // src/main/java/io/prestosql/parquet/ParquetTimestampUtils.java byte[] bytes = createdon.bytes(); // little endian encoding - need to invert byte order - long timeOfDayNanos = Longs.fromBytes(bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); + long timeOfDayNanos = + Longs.fromBytes( + bytes[7], bytes[6], bytes[5], bytes[4], bytes[3], bytes[2], bytes[1], bytes[0]); int julianDay = Ints.fromBytes(bytes[11], bytes[10], bytes[9], bytes[8]); - return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + (timeOfDayNanos / NANOS_PER_MILLISECOND); + return ((julianDay - JULIAN_EPOCH_OFFSET_DAYS) * MILLIS_IN_DAY) + + (timeOfDayNanos / NANOS_PER_MILLISECOND); } @Override diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java index d0db42e678eea2..48d0fa2fda04c3 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restorebackup/backupreader/ReaderWrapper.java @@ -5,9 +5,10 @@ import java.io.IOException; import lombok.extern.slf4j.Slf4j; - /** - * Abstract class that reads entries from a given source and transforms then into {@link EbeanAspectV2} instances. + * Abstract class that reads entries from a given source and transforms then into {@link + * EbeanAspectV2} instances. 
+ * * @param The object type to read from a reader source. */ @Slf4j @@ -69,9 +70,15 @@ record = read(); abstract EbeanAspectV2 convertRecord(T record); private void printStat(String prefix) { - log.info("{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," - + " records failed: {}, Total millis in convert: {}", prefix, _fileName, - recordsProcessed, totalTimeSpentInRead / 1000 / 1000, recordsSkipped, recordsFailed, + log.info( + "{} Reader {}. Stats: records processed: {}, Total millis spent in reading: {}, records skipped: {}," + + " records failed: {}, Total millis in convert: {}", + prefix, + _fileName, + recordsProcessed, + totalTimeSpentInRead / 1000 / 1000, + recordsSkipped, + recordsFailed, totalTimeSpentInConvert / 1000 / 1000); } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java index 3c0a9762a28c92..f46bb9b05624db 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/RestoreIndices.java @@ -13,7 +13,7 @@ import io.ebean.Database; import java.util.ArrayList; import java.util.List; - +import javax.annotation.Nullable; public class RestoreIndices implements Upgrade { public static final String BATCH_SIZE_ARG_NAME = "batchSize"; @@ -24,15 +24,23 @@ public class RestoreIndices implements Upgrade { public static final String WRITER_POOL_SIZE = "WRITER_POOL_SIZE"; public static final String URN_ARG_NAME = "urn"; public static final String URN_LIKE_ARG_NAME = "urnLike"; + public static final String URN_BASED_PAGINATION_ARG_NAME = "urnBasedPagination"; public static final String STARTING_OFFSET_ARG_NAME = "startingOffset"; private final List _steps; - public RestoreIndices(final Database server, final EntityService entityService, - 
final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + public RestoreIndices( + @Nullable final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { - _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + if (server != null) { + _steps = buildSteps(server, entityService, entityRegistry, entitySearchService, graphService); + } else { + _steps = List.of(); + } } @Override @@ -45,8 +53,11 @@ public List steps() { return _steps; } - private List buildSteps(final Database server, final EntityService entityService, - final EntityRegistry entityRegistry, final EntitySearchService entitySearchService, + private List buildSteps( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry, + final EntitySearchService entitySearchService, final GraphService graphService) { final List steps = new ArrayList<>(); steps.add(new ClearSearchServiceStep(entitySearchService, false)); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java index 2ac4fea2e653ac..574b1f08b5f543 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/restoreindices/SendMAEStep.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.restoreindices; +import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; + import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; @@ -11,7 +13,6 @@ import com.linkedin.metadata.models.registry.EntityRegistry; import io.ebean.Database; import io.ebean.ExpressionList; - import 
java.util.ArrayList; import java.util.List; import java.util.Map; @@ -23,9 +24,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.function.Function; -import static com.linkedin.metadata.Constants.ASPECT_LATEST_VERSION; - - public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_BATCH_SIZE = 1000; @@ -33,24 +31,30 @@ public class SendMAEStep implements UpgradeStep { private static final int DEFAULT_STARTING_OFFSET = 0; private static final int DEFAULT_THREADS = 1; + private static final boolean DEFAULT_URN_BASED_PAGINATION = false; private final Database _server; private final EntityService _entityService; public class KafkaJob implements Callable { - UpgradeContext context; - RestoreIndicesArgs args; - public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { - this.context = context; - this.args = args; - } - @Override - public RestoreIndicesResult call() { - return _entityService.restoreIndices(args, context.report()::addLine); - } + UpgradeContext context; + RestoreIndicesArgs args; + + public KafkaJob(UpgradeContext context, RestoreIndicesArgs args) { + this.context = context; + this.args = args; + } + + @Override + public RestoreIndicesResult call() { + return _entityService.restoreIndices(args, context.report()::addLine); + } } - public SendMAEStep(final Database server, final EntityService entityService, final EntityRegistry entityRegistry) { + public SendMAEStep( + final Database server, + final EntityService entityService, + final EntityRegistry entityRegistry) { _server = server; _entityService = entityService; } @@ -67,7 +71,7 @@ public int retryCount() { private List iterateFutures(List> futures) { List result = new ArrayList<>(); - for (Future future: new ArrayList<>(futures)) { + for (Future future : new ArrayList<>(futures)) { if (future.isDone()) { try { result.add(future.get()); @@ -86,6 +90,7 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) { result.numThreads = 
getThreadCount(context.parsedArgs()); result.batchDelayMs = getBatchDelayMs(context.parsedArgs()); result.start = getStartingOffset(context.parsedArgs()); + result.urnBasedPagination = getUrnBasedPagination(context.parsedArgs()); if (containsKey(context.parsedArgs(), RestoreIndices.ASPECT_NAME_ARG_NAME)) { result.aspectName = context.parsedArgs().get(RestoreIndices.ASPECT_NAME_ARG_NAME).get(); } @@ -100,9 +105,10 @@ private RestoreIndicesArgs getArgs(UpgradeContext context) { private int getRowCount(RestoreIndicesArgs args) { ExpressionList countExp = - _server.find(EbeanAspectV2.class) - .where() - .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); + _server + .find(EbeanAspectV2.class) + .where() + .eq(EbeanAspectV2.VERSION_COLUMN, ASPECT_LATEST_VERSION); if (args.aspectName != null) { countExp = countExp.eq(EbeanAspectV2.ASPECT_COLUMN, args.aspectName); } @@ -120,45 +126,88 @@ public Function executable() { return (context) -> { RestoreIndicesResult finalJobResult = new RestoreIndicesResult(); RestoreIndicesArgs args = getArgs(context); - ThreadPoolExecutor executor = (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); + ThreadPoolExecutor executor = + (ThreadPoolExecutor) Executors.newFixedThreadPool(args.numThreads); context.report().addLine("Sending MAE from local DB"); long startTime = System.currentTimeMillis(); final int rowCount = getRowCount(args); - context.report().addLine(String.format("Found %s latest aspects in aspects table in %.2f minutes.", - rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); + context + .report() + .addLine( + String.format( + "Found %s latest aspects in aspects table in %.2f minutes.", + rowCount, (float) (System.currentTimeMillis() - startTime) / 1000 / 60)); int start = args.start; List> futures = new ArrayList<>(); startTime = System.currentTimeMillis(); - while (start < rowCount) { - args = args.clone(); - args.start = start; - futures.add(executor.submit(new KafkaJob(context, 
args))); - start = start + args.batchSize; - } - while (futures.size() > 0) { - List tmpResults = iterateFutures(futures); - for (RestoreIndicesResult tmpResult: tmpResults) { - reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + if (args.urnBasedPagination) { + RestoreIndicesResult previousResult = null; + int rowsProcessed = 1; + while (rowsProcessed > 0) { + args = args.clone(); + if (previousResult != null) { + args.lastUrn = previousResult.lastUrn; + args.lastAspect = previousResult.lastAspect; + } + args.start = start; + context + .report() + .addLine( + String.format( + "Getting next batch of urns + aspects, starting with %s - %s", + args.lastUrn, args.lastAspect)); + Future future = executor.submit(new KafkaJob(context, args)); + try { + RestoreIndicesResult result = future.get(); + reportStats(context, finalJobResult, result, rowCount, startTime); + previousResult = result; + rowsProcessed = result.rowsMigrated + result.ignored; + context.report().addLine(String.format("Rows processed this loop %d", rowsProcessed)); + start += args.batchSize; + } catch (InterruptedException | ExecutionException e) { + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + } + } else { + while (start < rowCount) { + args = args.clone(); + args.start = start; + futures.add(executor.submit(new KafkaJob(context, args))); + start = start + args.batchSize; + } + while (futures.size() > 0) { + List tmpResults = iterateFutures(futures); + for (RestoreIndicesResult tmpResult : tmpResults) { + reportStats(context, finalJobResult, tmpResult, rowCount, startTime); + } } } + executor.shutdown(); if (finalJobResult.rowsMigrated != rowCount) { float percentFailed = 0.0f; if (rowCount > 0) { percentFailed = (float) (rowCount - finalJobResult.rowsMigrated) * 100 / rowCount; } - context.report().addLine(String.format( - "Failed to send MAEs for %d rows (%.2f%% of total).", - rowCount - finalJobResult.rowsMigrated, percentFailed)); + context + 
.report() + .addLine( + String.format( + "Failed to send MAEs for %d rows (%.2f%% of total).", + rowCount - finalJobResult.rowsMigrated, percentFailed)); } return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); }; } - private static void reportStats(UpgradeContext context, RestoreIndicesResult finalResult, RestoreIndicesResult tmpResult, - int rowCount, long startTime) { + private static void reportStats( + UpgradeContext context, + RestoreIndicesResult finalResult, + RestoreIndicesResult tmpResult, + int rowCount, + long startTime) { finalResult.ignored += tmpResult.ignored; finalResult.rowsMigrated += tmpResult.rowsMigrated; finalResult.timeSqlQueryMs += tmpResult.timeSqlQueryMs; @@ -178,11 +227,22 @@ private static void reportStats(UpgradeContext context, RestoreIndicesResult fin estimatedTimeMinutesComplete = timeSoFarMinutes * (100 - percentSent) / percentSent; } float totalTimeComplete = timeSoFarMinutes + estimatedTimeMinutesComplete; - context.report().addLine(String.format( - "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", - finalResult.rowsMigrated, rowCount, percentSent, finalResult.ignored, percentIgnored)); - context.report().addLine(String.format("%.2f mins taken. %.2f est. mins to completion. Total mins est. = %.2f.", - timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); + context + .report() + .addLine( + String.format( + "Successfully sent MAEs for %s/%s rows (%.2f%% of total). %s rows ignored (%.2f%% of total)", + finalResult.rowsMigrated, + rowCount, + percentSent, + finalResult.ignored, + percentIgnored)); + context + .report() + .addLine( + String.format( + "%.2f mins taken. %.2f est. mins to completion. Total mins est. 
= %.2f.", + timeSoFarMinutes, estimatedTimeMinutesComplete, totalTimeComplete)); } private int getBatchSize(final Map> parsedArgs) { @@ -196,7 +256,8 @@ private int getStartingOffset(final Map> parsedArgs) { private long getBatchDelayMs(final Map> parsedArgs) { long resolvedBatchDelayMs = DEFAULT_BATCH_DELAY_MS; if (containsKey(parsedArgs, RestoreIndices.BATCH_DELAY_MS_ARG_NAME)) { - resolvedBatchDelayMs = Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); + resolvedBatchDelayMs = + Long.parseLong(parsedArgs.get(RestoreIndices.BATCH_DELAY_MS_ARG_NAME).get()); } return resolvedBatchDelayMs; } @@ -205,7 +266,17 @@ private int getThreadCount(final Map> parsedArgs) { return getInt(parsedArgs, DEFAULT_THREADS, RestoreIndices.NUM_THREADS_ARG_NAME); } - private int getInt(final Map> parsedArgs, int defaultVal, String argKey) { + private boolean getUrnBasedPagination(final Map> parsedArgs) { + boolean urnBasedPagination = DEFAULT_URN_BASED_PAGINATION; + if (containsKey(parsedArgs, RestoreIndices.URN_BASED_PAGINATION_ARG_NAME)) { + urnBasedPagination = + Boolean.parseBoolean(parsedArgs.get(RestoreIndices.URN_BASED_PAGINATION_ARG_NAME).get()); + } + return urnBasedPagination; + } + + private int getInt( + final Map> parsedArgs, int defaultVal, String argKey) { int result = defaultVal; if (containsKey(parsedArgs, argKey)) { result = Integer.parseInt(parsedArgs.get(argKey).get()); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java index 4a8211f2cd4ace..aba751bff8177d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/SystemUpdate.java @@ -8,47 +8,48 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.steps.DataHubStartupStep; import 
com.linkedin.datahub.upgrade.system.entity.steps.BackfillBrowsePathsV2; import com.linkedin.metadata.dao.producer.KafkaEventProducer; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class SystemUpdate implements Upgrade { - private final List _preStartupUpgrades; - private final List _postStartupUpgrades; - private final List _steps; - - public SystemUpdate(final BuildIndices buildIndicesJob, final CleanIndices cleanIndicesJob, - final KafkaEventProducer kafkaEventProducer, final String version, - final BackfillBrowsePathsV2 backfillBrowsePathsV2) { - - _preStartupUpgrades = List.of(buildIndicesJob); - _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); - _postStartupUpgrades = List.of(cleanIndicesJob, backfillBrowsePathsV2); - } - - @Override - public String id() { - return "SystemUpdate"; - } - - @Override - public List steps() { - return Stream.concat(Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), - _steps.stream()), - _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) - .collect(Collectors.toList()); - } - - @Override - public List cleanupSteps() { - return Stream.concat( - _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), - _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) - .collect(Collectors.toList()); - } + private final List _preStartupUpgrades; + private final List _postStartupUpgrades; + private final List _steps; + + public SystemUpdate( + final BuildIndices buildIndicesJob, + final CleanIndices cleanIndicesJob, + final KafkaEventProducer kafkaEventProducer, + final String version, + final BackfillBrowsePathsV2 backfillBrowsePathsV2) { + + _preStartupUpgrades = List.of(buildIndicesJob); + _steps = List.of(new DataHubStartupStep(kafkaEventProducer, version)); + _postStartupUpgrades = List.of(cleanIndicesJob, 
backfillBrowsePathsV2); + } + + @Override + public String id() { + return "SystemUpdate"; + } + + @Override + public List steps() { + return Stream.concat( + Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.steps().stream()), _steps.stream()), + _postStartupUpgrades.stream().flatMap(up -> up.steps().stream())) + .collect(Collectors.toList()); + } + + @Override + public List cleanupSteps() { + return Stream.concat( + _preStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream()), + _postStartupUpgrades.stream().flatMap(up -> up.cleanupSteps().stream())) + .collect(Collectors.toList()); + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java index 1da5b6d6a25cee..eb76a72fba71af 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/BuildIndices.java @@ -2,9 +2,9 @@ import com.linkedin.datahub.upgrade.Upgrade; import com.linkedin.datahub.upgrade.UpgradeStep; -import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPostStep; import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesPreStep; +import com.linkedin.datahub.upgrade.system.elasticsearch.steps.BuildIndicesStep; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; import com.linkedin.metadata.graph.GraphService; @@ -17,49 +17,54 @@ import java.util.stream.Collectors; import java.util.stream.Stream; - public class BuildIndices implements Upgrade { - private final List _steps; - - public BuildIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService 
timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - - final ConfigurationProvider configurationProvider) { - + private final List _steps; - List indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); + public BuildIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { - _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); - } + List indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); - @Override - public String id() { - return "BuildIndices"; - } + _steps = buildSteps(indexedServices, baseElasticSearchComponents, configurationProvider); + } - @Override - public List steps() { - return _steps; - } + @Override + public String id() { + return "BuildIndices"; + } - private List buildSteps(final List indexedServices, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { + @Override + public List steps() { + return _steps; + } - final List steps = new ArrayList<>(); - // Disable ES write mode/change refresh rate and clone indices - steps.add(new 
BuildIndicesPreStep(baseElasticSearchComponents, indexedServices, configurationProvider)); - // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService - steps.add(new BuildIndicesStep(indexedServices)); - // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in pre-configure step if it already exists? - steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); - return steps; - } + private List buildSteps( + final List indexedServices, + final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + final List steps = new ArrayList<>(); + // Disable ES write mode/change refresh rate and clone indices + steps.add( + new BuildIndicesPreStep( + baseElasticSearchComponents, indexedServices, configurationProvider)); + // Configure graphService, entitySearchService, systemMetadataService, timeseriesAspectService + steps.add(new BuildIndicesStep(indexedServices)); + // Reset configuration (and delete clones? Or just do this regularly? Or delete clone in + // pre-configure step if it already exists? 
+ steps.add(new BuildIndicesPostStep(baseElasticSearchComponents, indexedServices)); + return steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java index 1fb9c8526ad3b0..ad68386622b216 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/CleanIndices.java @@ -10,41 +10,45 @@ import com.linkedin.metadata.shared.ElasticSearchIndexed; import com.linkedin.metadata.systemmetadata.SystemMetadataService; import com.linkedin.metadata.timeseries.TimeseriesAspectService; -import lombok.extern.slf4j.Slf4j; - import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; - +import lombok.extern.slf4j.Slf4j; @Slf4j public class CleanIndices implements Upgrade { - private final List _steps; - - public CleanIndices(final SystemMetadataService systemMetadataService, final TimeseriesAspectService timeseriesAspectService, - final EntitySearchService entitySearchService, final GraphService graphService, - final BaseElasticSearchComponentsFactory.BaseElasticSearchComponents baseElasticSearchComponents, - final ConfigurationProvider configurationProvider) { - - List indexedServices = Stream.of( - graphService, entitySearchService, systemMetadataService, timeseriesAspectService) - .filter(service -> service instanceof ElasticSearchIndexed) - .map(service -> (ElasticSearchIndexed) service) - .collect(Collectors.toList()); - - _steps = List.of(new CleanIndicesStep( + private final List _steps; + + public CleanIndices( + final SystemMetadataService systemMetadataService, + final TimeseriesAspectService timeseriesAspectService, + final EntitySearchService entitySearchService, + final GraphService graphService, + final 
BaseElasticSearchComponentsFactory.BaseElasticSearchComponents + baseElasticSearchComponents, + final ConfigurationProvider configurationProvider) { + + List indexedServices = + Stream.of(graphService, entitySearchService, systemMetadataService, timeseriesAspectService) + .filter(service -> service instanceof ElasticSearchIndexed) + .map(service -> (ElasticSearchIndexed) service) + .collect(Collectors.toList()); + + _steps = + List.of( + new CleanIndicesStep( baseElasticSearchComponents.getSearchClient(), configurationProvider.getElasticSearch(), indexedServices)); - } + } - @Override - public String id() { - return "CleanIndices"; - } + @Override + public String id() { + return "CleanIndices"; + } - @Override - public List steps() { - return _steps; - } + @Override + public List steps() { + return _steps; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java index 2feca1f27e6258..a44f6d6487067d 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPostStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -13,16 +16,11 @@ import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import 
org.opensearch.action.admin.indices.settings.put.UpdateSettingsRequest; import org.opensearch.client.RequestOptions; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPostStep implements UpgradeStep { @@ -45,8 +43,9 @@ public Function executable() { return (context) -> { try { - List indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); // Reset write blocking @@ -56,12 +55,26 @@ public Function executable() { request.settings(indexSettings); boolean ack = - _esComponents.getSearchClient().indices().putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); if (ack) { - ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexConfig.name(), false); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexConfig.name(), indexSettings, ack); + ack = + IndexUtils.validateWriteBlock( + _esComponents.getSearchClient(), indexConfig.name(), false); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexConfig.name(), + indexSettings, + ack); } if (!ack) { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java index 82b9428c89fb8f..c25888be07f899 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesPreStep.java @@ -1,5 +1,8 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; +import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; + import com.google.common.collect.ImmutableMap; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; @@ -8,15 +11,13 @@ import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; import com.linkedin.gms.factory.config.ConfigurationProvider; import com.linkedin.gms.factory.search.BaseElasticSearchComponentsFactory; - +import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; +import com.linkedin.metadata.shared.ElasticSearchIndexed; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.function.Function; import java.util.stream.Collectors; - -import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; -import com.linkedin.metadata.shared.ElasticSearchIndexed; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.opensearch.OpenSearchStatusException; @@ -24,10 +25,6 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.indices.ResizeRequest; -import static 
com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.INDEX_BLOCKS_WRITE_SETTING; -import static com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils.getAllReindexConfigs; - - @RequiredArgsConstructor @Slf4j public class BuildIndicesPreStep implements UpgradeStep { @@ -50,16 +47,19 @@ public Function executable() { return (context) -> { try { // Get indices to update - List indexConfigs = getAllReindexConfigs(_services) - .stream().filter(ReindexConfig::requiresReindex) + List indexConfigs = + getAllReindexConfigs(_services).stream() + .filter(ReindexConfig::requiresReindex) .collect(Collectors.toList()); for (ReindexConfig indexConfig : indexConfigs) { - String indexName = IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); + String indexName = + IndexUtils.resolveAlias(_esComponents.getSearchClient(), indexConfig.name()); boolean ack = blockWrites(indexName); if (!ack) { - log.error("Partial index settings update, some indices may still be blocking writes." + log.error( + "Partial index settings update, some indices may still be blocking writes." 
+ " Please fix the error and re-run the BuildIndices upgrade job."); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } @@ -69,10 +69,16 @@ public Function executable() { String clonedName = indexConfig.name() + "_clone_" + System.currentTimeMillis(); ResizeRequest resizeRequest = new ResizeRequest(clonedName, indexName); boolean cloneAck = - _esComponents.getSearchClient().indices().clone(resizeRequest, RequestOptions.DEFAULT).isAcknowledged(); + _esComponents + .getSearchClient() + .indices() + .clone(resizeRequest, RequestOptions.DEFAULT) + .isAcknowledged(); log.info("Cloned index {} into {}, Acknowledged: {}", indexName, clonedName, cloneAck); if (!cloneAck) { - log.error("Partial index settings update, cloned indices may need to be cleaned up: {}", clonedName); + log.error( + "Partial index settings update, cloned indices may need to be cleaned up: {}", + clonedName); return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); } } @@ -85,8 +91,6 @@ public Function executable() { }; } - - private boolean blockWrites(String indexName) throws InterruptedException, IOException { UpdateSettingsRequest request = new UpdateSettingsRequest(indexName); Map indexSettings = ImmutableMap.of(INDEX_BLOCKS_WRITE_SETTING, "true"); @@ -94,13 +98,23 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc request.settings(indexSettings); boolean ack; try { - ack = _esComponents.getSearchClient().indices() - .putSettings(request, RequestOptions.DEFAULT).isAcknowledged(); - log.info("Updated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + ack = + _esComponents + .getSearchClient() + .indices() + .putSettings(request, RequestOptions.DEFAULT) + .isAcknowledged(); + log.info( + "Updated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } catch (OpenSearchStatusException | IOException ese) { - // Cover first run case, indices won't exist so settings updates won't work nor will the rest of the preConfigure steps. + // Cover first run case, indices won't exist so settings updates won't work nor will the rest + // of the preConfigure steps. // Since no data are in there they are skippable. - // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way to extract it :( + // Have to hack around HighLevelClient not sending the actual Java type nor having an easy way + // to extract it :( if (ese.getMessage().contains("index_not_found")) { return true; } else { @@ -110,7 +124,11 @@ private boolean blockWrites(String indexName) throws InterruptedException, IOExc if (ack) { ack = IndexUtils.validateWriteBlock(_esComponents.getSearchClient(), indexName, true); - log.info("Validated index {} with new settings. Settings: {}, Acknowledged: {}", indexName, indexSettings, ack); + log.info( + "Validated index {} with new settings. 
Settings: {}, Acknowledged: {}", + indexName, + indexSettings, + ack); } return ack; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java index ef59f2998929e3..d37ee173bd9af8 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/BuildIndicesStep.java @@ -5,13 +5,11 @@ import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.shared.ElasticSearchIndexed; - import java.util.List; import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; - @Slf4j @RequiredArgsConstructor public class BuildIndicesStep implements UpgradeStep { diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java index bb042bac6df955..c3a4d8ab89c070 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/CleanIndicesStep.java @@ -1,54 +1,55 @@ package com.linkedin.datahub.upgrade.system.elasticsearch.steps; -import com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.datahub.upgrade.UpgradeContext; import com.linkedin.datahub.upgrade.UpgradeStep; import com.linkedin.datahub.upgrade.UpgradeStepResult; import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.datahub.upgrade.system.elasticsearch.util.IndexUtils; +import 
com.linkedin.metadata.config.search.ElasticSearchConfiguration; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; import com.linkedin.metadata.shared.ElasticSearchIndexed; -import lombok.extern.slf4j.Slf4j; -import org.opensearch.client.RestHighLevelClient; - import java.util.List; import java.util.function.Function; - +import lombok.extern.slf4j.Slf4j; +import org.opensearch.client.RestHighLevelClient; @Slf4j public class CleanIndicesStep implements UpgradeStep { - private final RestHighLevelClient searchClient; - private final ElasticSearchConfiguration esConfig; - private final List indexedServices; - - public CleanIndicesStep(final RestHighLevelClient searchClient, final ElasticSearchConfiguration esConfig, - final List indexedServices) { - this.searchClient = searchClient; - this.esConfig = esConfig; - this.indexedServices = indexedServices; - } - - @Override - public String id() { - return "CleanUpIndicesStep"; - } - - @Override - public int retryCount() { - return 0; - } - - @Override - public Function executable() { - return (context) -> { - try { - IndexUtils.getAllReindexConfigs(indexedServices) - .forEach(reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); - } catch (Exception e) { - log.error("CleanUpIndicesStep failed.", e); - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); - } - return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); - }; - } + private final RestHighLevelClient searchClient; + private final ElasticSearchConfiguration esConfig; + private final List indexedServices; + + public CleanIndicesStep( + final RestHighLevelClient searchClient, + final ElasticSearchConfiguration esConfig, + final List indexedServices) { + this.searchClient = searchClient; + this.esConfig = esConfig; + this.indexedServices = indexedServices; + } + + @Override + public String id() { + return "CleanUpIndicesStep"; + } + + @Override + public int 
retryCount() { + return 0; + } + + @Override + public Function executable() { + return (context) -> { + try { + IndexUtils.getAllReindexConfigs(indexedServices) + .forEach( + reindexConfig -> ESIndexBuilder.cleanIndex(searchClient, esConfig, reindexConfig)); + } catch (Exception e) { + log.error("CleanUpIndicesStep failed.", e); + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.FAILED); + } + return new DefaultUpgradeStepResult(id(), UpgradeStepResult.Result.SUCCEEDED); + }; + } } diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java index 1e568f1e9a9fec..b4a506c3f5c63c 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/steps/DataHubStartupStep.java @@ -6,12 +6,10 @@ import com.linkedin.datahub.upgrade.impl.DefaultUpgradeStepResult; import com.linkedin.metadata.dao.producer.KafkaEventProducer; import com.linkedin.mxe.DataHubUpgradeHistoryEvent; +import java.util.function.Function; import lombok.RequiredArgsConstructor; import lombok.extern.slf4j.Slf4j; -import java.util.function.Function; - - @RequiredArgsConstructor @Slf4j public class DataHubStartupStep implements UpgradeStep { @@ -32,8 +30,8 @@ public int retryCount() { public Function executable() { return (context) -> { try { - DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = new DataHubUpgradeHistoryEvent() - .setVersion(_version); + DataHubUpgradeHistoryEvent dataHubUpgradeHistoryEvent = + new DataHubUpgradeHistoryEvent().setVersion(_version); _kafkaEventProducer.produceDataHubUpgradeHistoryEvent(dataHubUpgradeHistoryEvent); log.info("Initiating startup for version: {}", _version); } catch (Exception e) { diff --git 
a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java index d9788448444eda..b3de7c503fb3e5 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/elasticsearch/util/IndexUtils.java @@ -2,6 +2,10 @@ import com.linkedin.metadata.search.elasticsearch.indexbuilder.ReindexConfig; import com.linkedin.metadata.shared.ElasticSearchIndexed; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Set; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.NotImplementedException; import org.opensearch.action.admin.indices.alias.get.GetAliasesRequest; @@ -11,22 +15,18 @@ import org.opensearch.client.RequestOptions; import org.opensearch.client.RestHighLevelClient; -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - - @Slf4j public class IndexUtils { public static final String INDEX_BLOCKS_WRITE_SETTING = "index.blocks.write"; public static final int INDEX_BLOCKS_WRITE_RETRY = 4; public static final int INDEX_BLOCKS_WRITE_WAIT_SECONDS = 10; - private IndexUtils() { } + + private IndexUtils() {} private static List _reindexConfigs = new ArrayList<>(); - public static List getAllReindexConfigs(List elasticSearchIndexedList) throws IOException { + public static List getAllReindexConfigs( + List elasticSearchIndexedList) throws IOException { // Avoid locking & reprocessing List reindexConfigs = new ArrayList<>(_reindexConfigs); if (reindexConfigs.isEmpty()) { @@ -39,19 +39,24 @@ public static List getAllReindexConfigs(List 0) { - GetSettingsResponse response = esClient.indices().getSettings(request, RequestOptions.DEFAULT); - if (response.getSetting(finalIndexName, 
INDEX_BLOCKS_WRITE_SETTING).equals(String.valueOf(expectedState))) { + GetSettingsResponse response = + esClient.indices().getSettings(request, RequestOptions.DEFAULT); + if (response + .getSetting(finalIndexName, INDEX_BLOCKS_WRITE_SETTING) + .equals(String.valueOf(expectedState))) { return true; } count = count - 1; @@ -64,20 +69,20 @@ public static boolean validateWriteBlock(RestHighLevelClient esClient, String in return false; } - public static String resolveAlias(RestHighLevelClient esClient, String indexName) throws IOException { + public static String resolveAlias(RestHighLevelClient esClient, String indexName) + throws IOException { String finalIndexName = indexName; - GetAliasesResponse aliasResponse = esClient.indices() - .getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); + GetAliasesResponse aliasResponse = + esClient.indices().getAlias(new GetAliasesRequest(indexName), RequestOptions.DEFAULT); if (!aliasResponse.getAliases().isEmpty()) { Set indices = aliasResponse.getAliases().keySet(); if (indices.size() != 1) { throw new NotImplementedException( - String.format("Clone not supported for %s indices in alias %s. Indices: %s", - indices.size(), - indexName, - String.join(",", indices))); + String.format( + "Clone not supported for %s indices in alias %s. 
Indices: %s", + indices.size(), indexName, String.join(",", indices))); } finalIndexName = indices.stream().findFirst().get(); log.info("Alias {} resolved to index {}", indexName, finalIndexName); diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java index e213c0b2fd4dec..03f0b0b7f2ec2f 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2.java @@ -7,7 +7,6 @@ import com.linkedin.metadata.search.SearchService; import java.util.List; - public class BackfillBrowsePathsV2 implements Upgrade { private final List _steps; diff --git a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java index 08a752d9597f42..610d9069337a52 100644 --- a/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java +++ b/datahub-upgrade/src/main/java/com/linkedin/datahub/upgrade/system/entity/steps/BackfillBrowsePathsV2Step.java @@ -1,5 +1,7 @@ package com.linkedin.datahub.upgrade.system.entity.steps; +import static com.linkedin.metadata.Constants.*; + import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.linkedin.common.AuditStamp; @@ -27,32 +29,29 @@ import com.linkedin.metadata.utils.GenericRecordUtils; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.SystemMetadata; +import java.util.Set; import java.util.function.Function; import lombok.extern.slf4j.Slf4j; -import java.util.Set; - -import static com.linkedin.metadata.Constants.*; - - @Slf4j public class 
BackfillBrowsePathsV2Step implements UpgradeStep { public static final String BACKFILL_BROWSE_PATHS_V2 = "BACKFILL_BROWSE_PATHS_V2"; - public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; + public static final String REPROCESS_DEFAULT_BROWSE_PATHS_V2 = + "REPROCESS_DEFAULT_BROWSE_PATHS_V2"; public static final String DEFAULT_BROWSE_PATH_V2 = "␟Default"; - private static final Set ENTITY_TYPES_TO_MIGRATE = ImmutableSet.of( - Constants.DATASET_ENTITY_NAME, - Constants.DASHBOARD_ENTITY_NAME, - Constants.CHART_ENTITY_NAME, - Constants.DATA_JOB_ENTITY_NAME, - Constants.DATA_FLOW_ENTITY_NAME, - Constants.ML_MODEL_ENTITY_NAME, - Constants.ML_MODEL_GROUP_ENTITY_NAME, - Constants.ML_FEATURE_TABLE_ENTITY_NAME, - Constants.ML_FEATURE_ENTITY_NAME - ); + private static final Set ENTITY_TYPES_TO_MIGRATE = + ImmutableSet.of( + Constants.DATASET_ENTITY_NAME, + Constants.DASHBOARD_ENTITY_NAME, + Constants.CHART_ENTITY_NAME, + Constants.DATA_JOB_ENTITY_NAME, + Constants.DATA_FLOW_ENTITY_NAME, + Constants.ML_MODEL_ENTITY_NAME, + Constants.ML_MODEL_GROUP_ENTITY_NAME, + Constants.ML_FEATURE_TABLE_ENTITY_NAME, + Constants.ML_FEATURE_ENTITY_NAME); private static final Integer BATCH_SIZE = 5000; private final EntityService _entityService; @@ -67,14 +66,18 @@ public BackfillBrowsePathsV2Step(EntityService entityService, SearchService sear public Function executable() { return (context) -> { final AuditStamp auditStamp = - new AuditStamp().setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)).setTime(System.currentTimeMillis()); + new AuditStamp() + .setActor(UrnUtils.getUrn(Constants.SYSTEM_ACTOR)) + .setTime(System.currentTimeMillis()); String scrollId = null; for (String entityType : ENTITY_TYPES_TO_MIGRATE) { int migratedCount = 0; do { - log.info(String.format("Upgrading batch %s-%s of browse paths for entity type %s", migratedCount, - migratedCount + BATCH_SIZE, entityType)); + log.info( + String.format( + "Upgrading batch %s-%s of browse 
paths for entity type %s", + migratedCount, migratedCount + BATCH_SIZE, entityType)); scrollId = backfillBrowsePathsV2(entityType, auditStamp, scrollId); migratedCount += BATCH_SIZE; } while (scrollId != null); @@ -88,22 +91,26 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S final Filter filter; if (System.getenv().containsKey(REPROCESS_DEFAULT_BROWSE_PATHS_V2) - && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { + && Boolean.parseBoolean(System.getenv(REPROCESS_DEFAULT_BROWSE_PATHS_V2))) { filter = backfillDefaultBrowsePathsV2Filter(); - } else { + } else { filter = backfillBrowsePathsV2Filter(); } - final ScrollResult scrollResult = _searchService.scrollAcrossEntities( - ImmutableList.of(entityType), - "*", - filter, - null, - scrollId, - null, - BATCH_SIZE, - new SearchFlags().setFulltext(true).setSkipCache(true).setSkipHighlighting(true).setSkipAggregates(true) - ); + final ScrollResult scrollResult = + _searchService.scrollAcrossEntities( + ImmutableList.of(entityType), + "*", + filter, + null, + scrollId, + null, + BATCH_SIZE, + new SearchFlags() + .setFulltext(true) + .setSkipCache(true) + .setSkipHighlighting(true) + .setSkipAggregates(true)); if (scrollResult.getNumEntities() == 0 || scrollResult.getEntities().size() == 0) { return null; } @@ -113,7 +120,11 @@ private String backfillBrowsePathsV2(String entityType, AuditStamp auditStamp, S ingestBrowsePathsV2(searchEntity.getEntity(), auditStamp); } catch (Exception e) { // don't stop the whole step because of one bad urn or one bad ingestion - log.error(String.format("Error ingesting default browsePathsV2 aspect for urn %s", searchEntity.getEntity()), e); + log.error( + String.format( + "Error ingesting default browsePathsV2 aspect for urn %s", + searchEntity.getEntity()), + e); } } @@ -177,13 +188,10 @@ private void ingestBrowsePathsV2(Urn urn, AuditStamp auditStamp) throws Exceptio proposal.setEntityType(urn.getEntityType()); 
proposal.setAspectName(Constants.BROWSE_PATHS_V2_ASPECT_NAME); proposal.setChangeType(ChangeType.UPSERT); - proposal.setSystemMetadata(new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); + proposal.setSystemMetadata( + new SystemMetadata().setRunId(DEFAULT_RUN_ID).setLastObserved(System.currentTimeMillis())); proposal.setAspect(GenericRecordUtils.serializeAspect(browsePathsV2)); - _entityService.ingestProposal( - proposal, - auditStamp, - true - ); + _entityService.ingestProposal(proposal, auditStamp, true); } @Override @@ -192,7 +200,8 @@ public String id() { } /** - * Returns whether the upgrade should proceed if the step fails after exceeding the maximum retries. + * Returns whether the upgrade should proceed if the step fails after exceeding the maximum + * retries. */ @Override public boolean isOptional() { @@ -204,4 +213,3 @@ public boolean skip(UpgradeContext context) { return !Boolean.parseBoolean(System.getenv(BACKFILL_BROWSE_PATHS_V2)); } } - diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java index db697a40d0c6c9..83b8e028727cea 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/DatahubUpgradeNoSchemaRegistryTest.java @@ -1,70 +1,73 @@ package com.linkedin.datahub.upgrade; +import static org.testng.AssertJUnit.assertEquals; +import static org.testng.AssertJUnit.assertNotNull; + import com.linkedin.datahub.upgrade.system.SystemUpdate; +import java.util.List; +import java.util.Map; +import java.util.Optional; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import 
org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import java.util.List; -import java.util.Map; -import java.util.Optional; - -import static org.testng.AssertJUnit.assertEquals; -import static org.testng.AssertJUnit.assertNotNull; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, - properties = { - "kafka.schemaRegistry.type=INTERNAL", - "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" - }) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}, + properties = { + "kafka.schemaRegistry.type=INTERNAL", + "DATAHUB_UPGRADE_HISTORY_TOPIC_NAME=test_due_topic" + }) public class DatahubUpgradeNoSchemaRegistryTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("systemUpdate") - private SystemUpdate systemUpdate; - - @Test - public void testSystemUpdateInit() { - assertNotNull(systemUpdate); - } + @Autowired + @Named("systemUpdate") + private SystemUpdate systemUpdate; - @Test - public void testSystemUpdateSend() { - UpgradeStepResult.Result result = systemUpdate.steps().stream() - .filter(s -> s.id().equals("DataHubStartupStep")) - .findFirst().get() - .executable().apply(new UpgradeContext() { - @Override - public Upgrade upgrade() { - return null; - } + @Test + public void testSystemUpdateInit() { + assertNotNull(systemUpdate); + } - @Override - public List stepResults() { - return null; - } + @Test + public void testSystemUpdateSend() { + UpgradeStepResult.Result result = + systemUpdate.steps().stream() + .filter(s -> s.id().equals("DataHubStartupStep")) + .findFirst() + .get() + .executable() + .apply( + new UpgradeContext() { + @Override + public Upgrade upgrade() { + return null; + } - @Override - public UpgradeReport report() { - return null; - } + @Override + public List stepResults() { + return null; + } - @Override - public 
List args() { - return null; - } + @Override + public UpgradeReport report() { + return null; + } - @Override - public Map> parsedArgs() { - return null; - } - }).result(); - assertEquals("SUCCEEDED", result.toString()); - } + @Override + public List args() { + return null; + } + @Override + public Map> parsedArgs() { + return null; + } + }) + .result(); + assertEquals("SUCCEEDED", result.toString()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java index 74cde414adc2f1..3e655be900bf28 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTest.java @@ -1,49 +1,48 @@ package com.linkedin.datahub.upgrade; -import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; +import static org.testng.AssertJUnit.*; + import com.linkedin.datahub.upgrade.restoreindices.RestoreIndices; +import com.linkedin.datahub.upgrade.system.elasticsearch.BuildIndices; import com.linkedin.metadata.search.elasticsearch.indexbuilder.ESIndexBuilder; +import javax.inject.Named; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.test.context.SpringBootTest; import org.springframework.test.context.ActiveProfiles; import org.springframework.test.context.testng.AbstractTestNGSpringContextTests; import org.testng.annotations.Test; -import javax.inject.Named; - -import static org.testng.AssertJUnit.*; - @ActiveProfiles("test") -@SpringBootTest(classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) +@SpringBootTest( + classes = {UpgradeCliApplication.class, UpgradeCliApplicationTestConfiguration.class}) public class UpgradeCliApplicationTest extends AbstractTestNGSpringContextTests { - @Autowired - @Named("restoreIndices") - private 
RestoreIndices restoreIndices; - - @Autowired - @Named("buildIndices") - private BuildIndices buildIndices; - - @Autowired - private ESIndexBuilder esIndexBuilder; - - @Test - public void testRestoreIndicesInit() { - /* - This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean - */ - assertTrue(restoreIndices.steps().size() >= 3); - } - - @Test - public void testBuildIndicesInit() { - assertEquals("BuildIndices", buildIndices.id()); - assertTrue(buildIndices.steps().size() >= 3); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); - assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); - assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); - assertFalse(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); - } - + @Autowired + @Named("restoreIndices") + private RestoreIndices restoreIndices; + + @Autowired + @Named("buildIndices") + private BuildIndices buildIndices; + + @Autowired private ESIndexBuilder esIndexBuilder; + + @Test + public void testRestoreIndicesInit() { + /* + This might seem like a simple test however it does exercise the spring autowiring of the kafka health check bean + */ + assertTrue(restoreIndices.steps().size() >= 3); + } + + @Test + public void testBuildIndicesInit() { + assertEquals("BuildIndices", buildIndices.id()); + assertTrue(buildIndices.steps().size() >= 3); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration()); + assertNotNull(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices()); + assertTrue(esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isCloneIndices()); + assertFalse( + esIndexBuilder.getElasticSearchConfiguration().getBuildIndices().isAllowDocCountMismatch()); + } } diff --git a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java 
b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java index 6cc853b2c7c4d5..0e7bf5ddd5250c 100644 --- a/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java +++ b/datahub-upgrade/src/test/java/com/linkedin/datahub/upgrade/UpgradeCliApplicationTestConfiguration.java @@ -16,27 +16,19 @@ @Import(value = {SystemAuthenticationFactory.class}) public class UpgradeCliApplicationTestConfiguration { - @MockBean - private UpgradeCli upgradeCli; + @MockBean private UpgradeCli upgradeCli; - @MockBean - private Database ebeanServer; + @MockBean private Database ebeanServer; - @MockBean - private EntityService _entityService; + @MockBean private EntityService _entityService; - @MockBean - private SearchService searchService; + @MockBean private SearchService searchService; - @MockBean - private GraphService graphService; + @MockBean private GraphService graphService; - @MockBean - private EntityRegistry entityRegistry; + @MockBean private EntityRegistry entityRegistry; - @MockBean - ConfigEntityRegistry configEntityRegistry; + @MockBean ConfigEntityRegistry configEntityRegistry; - @MockBean - public EntityIndexBuilders entityIndexBuilders; + @MockBean public EntityIndexBuilders entityIndexBuilders; } diff --git a/datahub-web-react/build.gradle b/datahub-web-react/build.gradle index fd36e5ac4bc2c3..72821d8b97dc0b 100644 --- a/datahub-web-react/build.gradle +++ b/datahub-web-react/build.gradle @@ -1,8 +1,8 @@ plugins { id 'java' + id 'distribution' + id 'com.github.node-gradle.node' } -apply plugin: 'distribution' -apply plugin: 'com.github.node-gradle.node' node { @@ -35,7 +35,7 @@ node { yarnWorkDir = file("${project.projectDir}/.gradle/yarn") // Set the work directory where node_modules should be located - nodeModulesDir = file("${project.projectDir}") + nodeProjectDir = file("${project.projectDir}") } @@ -94,7 +94,7 @@ configurations { distZip { dependsOn yarnQuickBuild - 
baseName 'datahub-web-react' + archiveFileName = "datahub-web-react-${archiveVersion}.${archiveExtension}" from 'dist' } @@ -112,5 +112,5 @@ jar { into('public') { from zipTree(distZip.outputs.files.first()) } - classifier = 'assets' + archiveClassifier = 'assets' } diff --git a/datahub-web-react/codegen.yml b/datahub-web-react/codegen.yml index 35728e8aeb7d49..417d6a8f1c2a6a 100644 --- a/datahub-web-react/codegen.yml +++ b/datahub-web-react/codegen.yml @@ -1,16 +1,6 @@ overwrite: true schema: - - '../datahub-graphql-core/src/main/resources/app.graphql' - - '../datahub-graphql-core/src/main/resources/entity.graphql' - - '../datahub-graphql-core/src/main/resources/search.graphql' - - '../datahub-graphql-core/src/main/resources/analytics.graphql' - - '../datahub-graphql-core/src/main/resources/recommendation.graphql' - - '../datahub-graphql-core/src/main/resources/auth.graphql' - - '../datahub-graphql-core/src/main/resources/ingestion.graphql' - - '../datahub-graphql-core/src/main/resources/timeline.graphql' - - '../datahub-graphql-core/src/main/resources/tests.graphql' - - '../datahub-graphql-core/src/main/resources/step.graphql' - - '../datahub-graphql-core/src/main/resources/lineage.graphql' + - '../datahub-graphql-core/src/main/resources/*.graphql' config: scalars: Long: number diff --git a/datahub-web-react/package.json b/datahub-web-react/package.json index c26338ea285fbf..b949c9ab9d11f9 100644 --- a/datahub-web-react/package.json +++ b/datahub-web-react/package.json @@ -92,6 +92,7 @@ "scripts": { "analyze": "source-map-explorer 'dist/static/js/*.js'", "start": "yarn run generate && BROWSER=none REACT_APP_MOCK=false craco start", + "start:dev": "yarn run generate && DISABLE_ESLINT_PLUGIN=true BROWSER=none REACT_APP_MOCK=false craco start", "start:mock": "yarn run generate && BROWSER=none REACT_APP_MOCK=true craco start", "start:e2e": "REACT_APP_MOCK=cy BROWSER=none PORT=3010 craco start", "ec2-dev": "yarn run generate && CI=true;export CI;BROWSER=none craco 
start", diff --git a/datahub-web-react/src/App.less b/datahub-web-react/src/App.less index a001aa103b33f6..003e86981b2b23 100644 --- a/datahub-web-react/src/App.less +++ b/datahub-web-react/src/App.less @@ -4,5 +4,5 @@ @font-face { font-family: 'Manrope'; font-style: normal; - src: local('Mnarope'), url('./fonts/manrope.woff2') format('woff2'), + src: local('Manrope'), url('./fonts/manrope.woff2') format('woff2'), } diff --git a/datahub-web-react/src/App.tsx b/datahub-web-react/src/App.tsx index 342a89f350429f..1d9f5d2b439931 100644 --- a/datahub-web-react/src/App.tsx +++ b/datahub-web-react/src/App.tsx @@ -1,4 +1,4 @@ -import React, { useEffect, useMemo, useState } from 'react'; +import React, { useEffect, useState } from 'react'; import Cookies from 'js-cookie'; import { message } from 'antd'; import { BrowserRouter as Router } from 'react-router-dom'; @@ -8,34 +8,11 @@ import { ThemeProvider } from 'styled-components'; import { Helmet, HelmetProvider } from 'react-helmet-async'; import './App.less'; import { Routes } from './app/Routes'; -import EntityRegistry from './app/entity/EntityRegistry'; -import { DashboardEntity } from './app/entity/dashboard/DashboardEntity'; -import { ChartEntity } from './app/entity/chart/ChartEntity'; -import { UserEntity } from './app/entity/user/User'; -import { GroupEntity } from './app/entity/group/Group'; -import { DatasetEntity } from './app/entity/dataset/DatasetEntity'; -import { DataFlowEntity } from './app/entity/dataFlow/DataFlowEntity'; -import { DataJobEntity } from './app/entity/dataJob/DataJobEntity'; -import { TagEntity } from './app/entity/tag/Tag'; -import { EntityRegistryContext } from './entityRegistryContext'; import { Theme } from './conf/theme/types'; import defaultThemeConfig from './conf/theme/theme_light.config.json'; import { PageRoutes } from './conf/Global'; import { isLoggedInVar } from './app/auth/checkAuthStatus'; import { GlobalCfg } from './conf'; -import { GlossaryTermEntity } from 
'./app/entity/glossaryTerm/GlossaryTermEntity'; -import { MLFeatureEntity } from './app/entity/mlFeature/MLFeatureEntity'; -import { MLPrimaryKeyEntity } from './app/entity/mlPrimaryKey/MLPrimaryKeyEntity'; -import { MLFeatureTableEntity } from './app/entity/mlFeatureTable/MLFeatureTableEntity'; -import { MLModelEntity } from './app/entity/mlModel/MLModelEntity'; -import { MLModelGroupEntity } from './app/entity/mlModelGroup/MLModelGroupEntity'; -import { DomainEntity } from './app/entity/domain/DomainEntity'; -import { ContainerEntity } from './app/entity/container/ContainerEntity'; -import GlossaryNodeEntity from './app/entity/glossaryNode/GlossaryNodeEntity'; -import { DataPlatformEntity } from './app/entity/dataPlatform/DataPlatformEntity'; -import { DataProductEntity } from './app/entity/dataProduct/DataProductEntity'; -import { DataPlatformInstanceEntity } from './app/entity/dataPlatformInstance/DataPlatformInstanceEntity'; -import { RoleEntity } from './app/entity/Access/RoleEntity'; import possibleTypesResult from './possibleTypes.generated'; /* @@ -101,32 +78,6 @@ const App: React.VFC = () => { }); }, []); - const entityRegistry = useMemo(() => { - const register = new EntityRegistry(); - register.register(new DatasetEntity()); - register.register(new DashboardEntity()); - register.register(new ChartEntity()); - register.register(new UserEntity()); - register.register(new GroupEntity()); - register.register(new TagEntity()); - register.register(new DataFlowEntity()); - register.register(new DataJobEntity()); - register.register(new GlossaryTermEntity()); - register.register(new MLFeatureEntity()); - register.register(new MLPrimaryKeyEntity()); - register.register(new MLFeatureTableEntity()); - register.register(new MLModelEntity()); - register.register(new MLModelGroupEntity()); - register.register(new DomainEntity()); - register.register(new ContainerEntity()); - register.register(new GlossaryNodeEntity()); - register.register(new RoleEntity()); - 
register.register(new DataPlatformEntity()); - register.register(new DataProductEntity()); - register.register(new DataPlatformInstanceEntity()); - return register; - }, []); - return ( @@ -134,11 +85,9 @@ const App: React.VFC = () => { {dynamicThemeConfig.content.title} - - - - - + + + diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index ada9a06ab5b954..17173fd28e07f9 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -437,6 +437,11 @@ export const dataset3 = { }, ], externalUrl: 'https://data.hub', + lastModified: { + __typename: 'AuditStamp', + time: 0, + actor: null, + }, }, parentContainers: { __typename: 'ParentContainersResult', @@ -702,6 +707,7 @@ export const dataset5 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:5' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; @@ -716,6 +722,7 @@ export const dataset6 = { origin: 'PROD', customProperties: [{ key: 'propertyAKey', value: 'propertyAValue', associatedUrn: 'urn:li:dataset:6' }], externalUrl: 'https://data.hub', + lastModified: dataset3.properties?.lastModified, }, }; diff --git a/datahub-web-react/src/app/AdminConsole.tsx b/datahub-web-react/src/app/AdminConsole.tsx index 8b14ca35763d10..f6395a3bd3cb8a 100644 --- a/datahub-web-react/src/app/AdminConsole.tsx +++ b/datahub-web-react/src/app/AdminConsole.tsx @@ -4,9 +4,9 @@ import { Menu } from 'antd'; import styled from 'styled-components'; import { BankOutlined, BarChartOutlined, MenuOutlined } from '@ant-design/icons'; import Sider from 'antd/lib/layout/Sider'; -import { useGetAuthenticatedUser } from './useGetAuthenticatedUser'; import { useAppConfig } from './useAppConfig'; import { ANTD_GRAY } from './entity/shared/constants'; +import { useUserContext } from './context/useUserContext'; const ToggleContainer = styled.div` background-color: ${ANTD_GRAY[4]}; @@ -32,7 +32,7 @@ 
const ControlSlideOut = styled(Sider)` * Container for all views behind an authentication wall. */ export const AdminConsole = (): JSX.Element => { - const me = useGetAuthenticatedUser(); + const me = useUserContext(); const [adminConsoleOpen, setAdminConsoleOpen] = useState(false); const { config } = useAppConfig(); @@ -40,8 +40,8 @@ export const AdminConsole = (): JSX.Element => { const isAnalyticsEnabled = config?.analyticsConfig.enabled; const isPoliciesEnabled = config?.policiesConfig.enabled; - const showAnalytics = (isAnalyticsEnabled && me && me.platformPrivileges.viewAnalytics) || false; - const showPolicyBuilder = (isPoliciesEnabled && me && me.platformPrivileges.managePolicies) || false; + const showAnalytics = (isAnalyticsEnabled && me && me?.platformPrivileges?.viewAnalytics) || false; + const showPolicyBuilder = (isPoliciesEnabled && me && me?.platformPrivileges?.managePolicies) || false; const showAdminConsole = showAnalytics || showPolicyBuilder; const onMenuItemClick = () => { diff --git a/datahub-web-react/src/app/AppProviders.tsx b/datahub-web-react/src/app/AppProviders.tsx index 1ced44048b5023..00597e1cf76406 100644 --- a/datahub-web-react/src/app/AppProviders.tsx +++ b/datahub-web-react/src/app/AppProviders.tsx @@ -4,6 +4,8 @@ import { EducationStepsProvider } from '../providers/EducationStepsProvider'; import UserContextProvider from './context/UserContextProvider'; import QuickFiltersProvider from '../providers/QuickFiltersProvider'; import SearchContextProvider from './search/context/SearchContextProvider'; +import EntityRegistryProvider from './EntityRegistryProvider'; +import { BrowserTitleProvider } from './shared/BrowserTabTitleContext'; interface Props { children: React.ReactNode; @@ -13,11 +15,15 @@ export default function AppProviders({ children }: Props) { return ( - - - {children} - - + + + + + {children} + + + + ); diff --git a/datahub-web-react/src/app/EntityRegistryProvider.tsx 
b/datahub-web-react/src/app/EntityRegistryProvider.tsx new file mode 100644 index 00000000000000..9e283c0d07fc8a --- /dev/null +++ b/datahub-web-react/src/app/EntityRegistryProvider.tsx @@ -0,0 +1,10 @@ +import React from 'react'; +import { EntityRegistryContext } from '../entityRegistryContext'; +import useBuildEntityRegistry from './useBuildEntityRegistry'; + +const EntityRegistryProvider = ({ children }: { children: React.ReactNode }) => { + const entityRegistry = useBuildEntityRegistry(); + return {children}; +}; + +export default EntityRegistryProvider; diff --git a/datahub-web-react/src/app/ProtectedRoutes.tsx b/datahub-web-react/src/app/ProtectedRoutes.tsx index 469e0d6030b352..a3f072e764bea3 100644 --- a/datahub-web-react/src/app/ProtectedRoutes.tsx +++ b/datahub-web-react/src/app/ProtectedRoutes.tsx @@ -13,25 +13,23 @@ import EmbedLookup from './embed/lookup'; * Container for all views behind an authentication wall. */ export const ProtectedRoutes = (): JSX.Element => { - const entityRegistry = useEntityRegistry(); - return ( - - - - } /> - } /> - {entityRegistry.getEntities().map((entity) => ( + + + } /> + } /> + {useEntityRegistry() + .getEntities() + .map((entity) => ( } /> ))} - } /> - - + } /> + ); diff --git a/datahub-web-react/src/app/analytics/analytics.ts b/datahub-web-react/src/app/analytics/analytics.ts index a66d76a09cf4de..468164069cfd03 100644 --- a/datahub-web-react/src/app/analytics/analytics.ts +++ b/datahub-web-react/src/app/analytics/analytics.ts @@ -30,16 +30,17 @@ export function getMergedTrackingOptions(options?: any) { export default { page: (data?: PageData, options?: any, callback?: (...params: any[]) => any) => { + const actorUrn = Cookies.get(CLIENT_AUTH_COOKIE) || undefined; const modifiedData = { ...data, type: EventType[EventType.PageViewEvent], - actorUrn: Cookies.get(CLIENT_AUTH_COOKIE) || undefined, + actorUrn, timestamp: Date.now(), date: new Date().toString(), userAgent: navigator.userAgent, browserId: getBrowserId(), }; 
- if (NODE_ENV === 'test') { + if (NODE_ENV === 'test' || !actorUrn) { return null; } const trackingOptions = getMergedTrackingOptions(options); diff --git a/datahub-web-react/src/app/buildEntityRegistry.ts b/datahub-web-react/src/app/buildEntityRegistry.ts new file mode 100644 index 00000000000000..4f746815708029 --- /dev/null +++ b/datahub-web-react/src/app/buildEntityRegistry.ts @@ -0,0 +1,48 @@ +import EntityRegistry from './entity/EntityRegistry'; +import { DashboardEntity } from './entity/dashboard/DashboardEntity'; +import { ChartEntity } from './entity/chart/ChartEntity'; +import { UserEntity } from './entity/user/User'; +import { GroupEntity } from './entity/group/Group'; +import { DatasetEntity } from './entity/dataset/DatasetEntity'; +import { DataFlowEntity } from './entity/dataFlow/DataFlowEntity'; +import { DataJobEntity } from './entity/dataJob/DataJobEntity'; +import { TagEntity } from './entity/tag/Tag'; +import { GlossaryTermEntity } from './entity/glossaryTerm/GlossaryTermEntity'; +import { MLFeatureEntity } from './entity/mlFeature/MLFeatureEntity'; +import { MLPrimaryKeyEntity } from './entity/mlPrimaryKey/MLPrimaryKeyEntity'; +import { MLFeatureTableEntity } from './entity/mlFeatureTable/MLFeatureTableEntity'; +import { MLModelEntity } from './entity/mlModel/MLModelEntity'; +import { MLModelGroupEntity } from './entity/mlModelGroup/MLModelGroupEntity'; +import { DomainEntity } from './entity/domain/DomainEntity'; +import { ContainerEntity } from './entity/container/ContainerEntity'; +import GlossaryNodeEntity from './entity/glossaryNode/GlossaryNodeEntity'; +import { DataPlatformEntity } from './entity/dataPlatform/DataPlatformEntity'; +import { DataProductEntity } from './entity/dataProduct/DataProductEntity'; +import { DataPlatformInstanceEntity } from './entity/dataPlatformInstance/DataPlatformInstanceEntity'; +import { RoleEntity } from './entity/Access/RoleEntity'; + +export default function buildEntityRegistry() { + const registry = new 
EntityRegistry(); + registry.register(new DatasetEntity()); + registry.register(new DashboardEntity()); + registry.register(new ChartEntity()); + registry.register(new UserEntity()); + registry.register(new GroupEntity()); + registry.register(new TagEntity()); + registry.register(new DataFlowEntity()); + registry.register(new DataJobEntity()); + registry.register(new GlossaryTermEntity()); + registry.register(new MLFeatureEntity()); + registry.register(new MLPrimaryKeyEntity()); + registry.register(new MLFeatureTableEntity()); + registry.register(new MLModelEntity()); + registry.register(new MLModelGroupEntity()); + registry.register(new DomainEntity()); + registry.register(new ContainerEntity()); + registry.register(new GlossaryNodeEntity()); + registry.register(new RoleEntity()); + registry.register(new DataPlatformEntity()); + registry.register(new DataProductEntity()); + registry.register(new DataPlatformInstanceEntity()); + return registry; +} \ No newline at end of file diff --git a/datahub-web-react/src/app/embed/EmbeddedPage.tsx b/datahub-web-react/src/app/embed/EmbeddedPage.tsx index 429f83f34af6e8..603a72675c4337 100644 --- a/datahub-web-react/src/app/embed/EmbeddedPage.tsx +++ b/datahub-web-react/src/app/embed/EmbeddedPage.tsx @@ -8,9 +8,9 @@ import { VIEW_ENTITY_PAGE } from '../entity/shared/constants'; import { decodeUrn } from '../entity/shared/utils'; import CompactContext from '../shared/CompactContext'; import { useEntityRegistry } from '../useEntityRegistry'; -import { useGetAuthenticatedUserUrn } from '../useGetAuthenticatedUser'; import analytics from '../analytics/analytics'; import { EventType } from '../analytics'; +import { useUserContext } from '../context/useUserContext'; const EmbeddedPageWrapper = styled.div` max-height: 100%; @@ -39,11 +39,11 @@ export default function EmbeddedPage({ entityType }: Props) { }); }, [entityType, urn]); - const authenticatedUserUrn = useGetAuthenticatedUserUrn(); + const { urn : authenticatedUserUrn } = 
useUserContext(); const { data } = useGetGrantedPrivilegesQuery({ variables: { input: { - actorUrn: authenticatedUserUrn, + actorUrn: authenticatedUserUrn as string, resourceSpec: { resourceType: entityType, resourceUrn: urn }, }, }, diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index 7d40b97a66b3b6..f60eb959374527 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -33,6 +33,7 @@ import DataProductSection from '../shared/containers/profile/sidebar/DataProduct import { getDataProduct } from '../shared/utils'; import AccessManagement from '../shared/tabs/Dataset/AccessManagement/AccessManagement'; import { matchedFieldPathsRenderer } from '../../search/matches/matchedFieldPathsRenderer'; +import { getLastUpdatedMs } from './shared/utils'; const SUBTYPES = { VIEW: 'view', @@ -310,9 +311,7 @@ export class DatasetEntity implements Entity { rowCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].rowCount} columnCount={(data as any).lastProfile?.length && (data as any).lastProfile[0].columnCount} sizeInBytes={(data as any).lastProfile?.length && (data as any).lastProfile[0].sizeInBytes} - lastUpdatedMs={ - (data as any).lastOperation?.length && (data as any).lastOperation[0].lastUpdatedTimestamp - } + lastUpdatedMs={getLastUpdatedMs(data.properties, (data as any)?.lastOperation)} health={data.health} degree={(result as any).degree} paths={(result as any).paths} diff --git a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx index 36b7d251950b4c..c1e2c1aa298b6d 100644 --- a/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx +++ 
b/datahub-web-react/src/app/entity/dataset/profile/stats/stats/DatasetStatsSummarySubHeader.tsx @@ -3,6 +3,7 @@ import { DatasetStatsSummary as DatasetStatsSummaryObj } from '../../../../../.. import { useBaseEntity } from '../../../../shared/EntityContext'; import { GetDatasetQuery } from '../../../../../../graphql/dataset.generated'; import { DatasetStatsSummary } from '../../../shared/DatasetStatsSummary'; +import { getLastUpdatedMs } from '../../../shared/utils'; export const DatasetStatsSummarySubHeader = () => { const result = useBaseEntity(); @@ -13,15 +14,13 @@ export const DatasetStatsSummarySubHeader = () => { const maybeLastProfile = dataset?.datasetProfiles && dataset.datasetProfiles.length ? dataset.datasetProfiles[0] : undefined; - const maybeLastOperation = dataset?.operations && dataset.operations.length ? dataset.operations[0] : undefined; - const rowCount = maybeLastProfile?.rowCount; const columnCount = maybeLastProfile?.columnCount; const sizeInBytes = maybeLastProfile?.sizeInBytes; const totalSqlQueries = dataset?.usageStats?.aggregations?.totalSqlQueries; const queryCountLast30Days = maybeStatsSummary?.queryCountLast30Days; const uniqueUserCountLast30Days = maybeStatsSummary?.uniqueUserCountLast30Days; - const lastUpdatedMs = maybeLastOperation?.lastUpdatedTimestamp; + const lastUpdatedMs = getLastUpdatedMs(dataset?.properties, dataset?.operations); return ( | null | undefined, + operations: Pick[] | null | undefined, +): number | undefined { + return ( + Math.max( + properties?.lastModified?.time || 0, + (operations?.length && operations[0].lastUpdatedTimestamp) || 0, + ) || undefined + ); +} diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx index 2d82521a90df58..0610fbfa7a7704 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx +++ 
b/datahub-web-react/src/app/entity/domain/DataProductsTab/CreateDataProductModal.tsx @@ -32,6 +32,7 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on variables: { input: { domainUrn: domain.urn, + id: builderState.id, properties: { name: builderState.name, description: builderState.description || undefined, @@ -49,10 +50,10 @@ export default function CreateDataProductModal({ domain, onCreateDataProduct, on onClose(); } }) - .catch(() => { + .catch(( error ) => { onClose(); message.destroy(); - message.error({ content: 'Failed to create Data Product. An unexpected error occurred' }); + message.error({ content: `Failed to create Data Product: ${error.message}.` }); }); } diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx new file mode 100644 index 00000000000000..a077a0308af1ff --- /dev/null +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductAdvancedOption.tsx @@ -0,0 +1,68 @@ +import React from "react"; +import { Collapse, Form, Input, Typography } from "antd"; +import styled from "styled-components"; +import { validateCustomUrnId } from '../../../shared/textUtil'; +import { DataProductBuilderFormProps } from "./types"; + + +const FormItem = styled(Form.Item)` + .ant-form-item-label { + padding-bottom: 2px; + } +`; + +const FormItemWithMargin = styled(FormItem)` + margin-bottom: 16px; +`; + +const FormItemNoMargin = styled(FormItem)` + margin-bottom: 0; +`; + +const AdvancedLabel = styled(Typography.Text)` + color: #373d44; +`; + +export function DataProductAdvancedOption({builderState, updateBuilderState }: DataProductBuilderFormProps){ + + function updateDataProductId(id: string) { + updateBuilderState({ + ...builderState, + id, + }); + } + + return ( + + Advanced Options} key="1"> + Data Product Id} + help="By default, a random UUID will be generated to uniquely identify 
this data product. If + you'd like to provide a custom id instead to more easily keep track of this data product, + you may provide it here. Be careful, you cannot easily change the data product id after + creation." + > + ({ + validator(_, value) { + if (value && validateCustomUrnId(value)) { + return Promise.resolve(); + } + return Promise.reject(new Error('Please enter a valid Data product id')); + }, + }), + ]} + > + updateDataProductId(e.target.value)} + /> + + + + + ) +} \ No newline at end of file diff --git a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx index b5a27a6e1b8766..98bb09098a36ea 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/DataProductBuilderForm.tsx @@ -3,18 +3,14 @@ import React from 'react'; import styled from 'styled-components'; import { Editor as MarkdownEditor } from '../../shared/tabs/Documentation/components/editor/Editor'; import { ANTD_GRAY } from '../../shared/constants'; -import { DataProductBuilderState } from './types'; +import { DataProductBuilderFormProps } from './types'; +import { DataProductAdvancedOption } from './DataProductAdvancedOption'; const StyledEditor = styled(MarkdownEditor)` border: 1px solid ${ANTD_GRAY[4]}; `; -type Props = { - builderState: DataProductBuilderState; - updateBuilderState: (newState: DataProductBuilderState) => void; -}; - -export default function DataProductBuilderForm({ builderState, updateBuilderState }: Props) { +export default function DataProductBuilderForm({ builderState, updateBuilderState }: DataProductBuilderFormProps) { function updateName(name: string) { updateBuilderState({ ...builderState, @@ -47,6 +43,7 @@ export default function DataProductBuilderForm({ builderState, updateBuilderStat Description}> + ); } diff --git 
a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts index 1ed3ede39cfbe4..fe22e3ed9a2a4b 100644 --- a/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts +++ b/datahub-web-react/src/app/entity/domain/DataProductsTab/types.ts @@ -1,4 +1,10 @@ export type DataProductBuilderState = { name: string; + id?: string; description?: string; }; + +export type DataProductBuilderFormProps = { + builderState: DataProductBuilderState; + updateBuilderState: (newState: DataProductBuilderState) => void; +}; \ No newline at end of file diff --git a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx index 080ee5889aec92..a6f6d9b0e28671 100644 --- a/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx +++ b/datahub-web-react/src/app/entity/glossaryTerm/GlossaryTermEntity.tsx @@ -65,7 +65,12 @@ export class GlossaryTermEntity implements Entity { useEntityQuery={useGetGlossaryTermQuery as any} headerActionItems={new Set([EntityActionItem.BATCH_ADD_GLOSSARY_TERM])} headerDropdownItems={ - new Set([EntityMenuItems.UPDATE_DEPRECATION, EntityMenuItems.MOVE, EntityMenuItems.DELETE]) + new Set([ + EntityMenuItems.UPDATE_DEPRECATION, + EntityMenuItems.CLONE, + EntityMenuItems.MOVE, + EntityMenuItems.DELETE, + ]) } isNameEditable hideBrowseBar diff --git a/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx new file mode 100644 index 00000000000000..a898a73c254efe --- /dev/null +++ b/datahub-web-react/src/app/entity/group/EditGroupDescriptionModal.tsx @@ -0,0 +1,64 @@ +import React, { useState } from 'react'; +import { Button, Modal, Form } from 'antd'; +import styled from 'styled-components'; + +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import { ANTD_GRAY } from 
'../shared/constants'; + +type Props = { + onClose: () => void; + onSaveAboutMe: () => void; + setStagedDescription: (des: string) => void; + stagedDescription: string | undefined; +}; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4]}; +`; + +export default function EditGroupDescriptionModal({ + onClose, + onSaveAboutMe, + setStagedDescription, + stagedDescription, +}: Props) { + const [form] = Form.useForm(); + const [aboutText,setAboutText] = useState(stagedDescription) + + function updateDescription(description: string) { + setAboutText(aboutText) + setStagedDescription(description); + + } + + const saveDescription = () => { + onSaveAboutMe(); + onClose(); + }; + + return ( + + + + + } + > +

+ +
+ +
+
+ + + ); +} diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx index d9eaed2682ea19..044b09dc185e53 100644 --- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx +++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx @@ -16,14 +16,16 @@ import { EmptyValue, SocialDetails, EditButton, - AboutSection, - AboutSectionText, GroupsSection, + AboutSection, } from '../shared/SidebarStyledComponents'; import GroupMembersSideBarSection from './GroupMembersSideBarSection'; import { useUserContext } from '../../context/useUserContext'; - -const { Paragraph } = Typography; +import { useBrowserTitle } from '../../shared/BrowserTabTitleContext'; +import StripMarkdownText, { removeMarkdown } from '../shared/components/styled/StripMarkdownText'; +import { Editor } from '../shared/tabs/Documentation/components/editor/Editor'; +import EditGroupDescriptionModal from './EditGroupDescriptionModal'; +import { REDESIGN_COLORS } from '../shared/constants'; type SideBarData = { photoUrl: string | undefined; @@ -80,6 +82,61 @@ const GroupTitle = styled(Typography.Title)` } `; +const EditIcon = styled(EditOutlined)` + cursor: pointer; + color: ${REDESIGN_COLORS.BLUE}; +`; +const AddNewDescription = styled(Button)` + display: none; + margin: -4px; + width: 140px; +`; + +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + text-align:left; + font-weight: normal; + font + min-height: 22px; + + &:hover ${AddNewDescription} { + display: block; + } + & ins.diff { + background-color: #b7eb8f99; + text-decoration: none; + &:hover { + background-color: #b7eb8faa; + } + } + & del.diff { + background-color: #ffa39e99; + text-decoration: line-through; + &: hover { + background-color: #ffa39eaa; + 
} + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; + /** * Responsible for reading & writing users. */ @@ -101,12 +158,38 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { const { url } = useRouteMatch(); const history = useHistory(); + const { updateTitle } = useBrowserTitle(); + + useEffect(()=>{ + // You can use the title and updateTitle function here + // For example, updating the title when the component mounts + if(name){ + updateTitle(`Group | ${name}`); + } + // // Don't forget to clean up the title when the component unmounts + return () => { + if(name){ // added to condition for rerendering issue + updateTitle(''); + } + }; + }, [name, updateTitle]); + /* eslint-disable @typescript-eslint/no-unused-vars */ const [editGroupModal, showEditGroupModal] = useState(false); const me = useUserContext(); const canEditGroup = me?.platformPrivileges?.manageIdentities; const [groupTitle, setGroupTitle] = useState(name); + const [expanded, setExpanded] = useState(false); + const [isUpdatingDescription, SetIsUpdatingDescription] = useState(false); + const [stagedDescription, setStagedDescription] = useState(aboutText); + const [updateName] = useUpdateNameMutation(); + const overLimit = removeMarkdown(aboutText || '').length > 80; + const ABBREVIATED_LIMIT = 80; + + useEffect(() => { + setStagedDescription(aboutText); + }, [aboutText]); useEffect(() => { setGroupTitle(groupTitle); @@ -136,12 +219,12 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { }; // About Text save - const onSaveAboutMe = (inputString) => { + const onSaveAboutMe = () => { updateCorpGroupPropertiesMutation({ variables: { urn: urn || '', input: { - description: inputString, + description: stagedDescription, }, }, }) @@ -201,16 +284,65 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { - {TITLES.about} - - - {aboutText || } - - 
+ + {TITLES.about} + + SetIsUpdatingDescription(true)} data-testid="edit-icon" /> + + + + {(aboutText && expanded) || !overLimit ? ( + <> + {/* Read only viewer for displaying group description */} + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + ) : ( + <> + {/* Display abbreviated description with option to read more */} + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {aboutText} + + + )} + + {/* Modal for updating group description */} + {isUpdatingDescription && ( + { + SetIsUpdatingDescription(false); + setStagedDescription(aboutText); + }} + onSaveAboutMe={onSaveAboutMe} + setStagedDescription={setStagedDescription} + stagedDescription={stagedDescription} + /> + )} diff --git a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx index 41e07520a0ece5..e08853ad150bfc 100644 --- a/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx +++ b/datahub-web-react/src/app/entity/ownership/table/ActionsColumn.tsx @@ -1,6 +1,6 @@ import React from 'react'; import { Dropdown, MenuProps, Popconfirm, Typography, message, notification } from 'antd'; -import { DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; +import { CopyOutlined, DeleteOutlined, EditOutlined, MoreOutlined } from '@ant-design/icons'; import styled from 'styled-components/macro'; import { OwnershipTypeEntity } from '../../../../types.generated'; import { useDeleteOwnershipTypeMutation } from '../../../../graphql/ownership.generated'; @@ -48,6 +48,10 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe setOwnershipType(ownershipType); }; + const onCopy=() => { + navigator.clipboard.writeText(ownershipType.urn); + } + const [deleteOwnershipTypeMutation] = useDeleteOwnershipTypeMutation(); const onDelete = () => { @@ -106,6 +110,15 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, 
setOwnershipType, refe ), }, + { + key: 'copy', + icon: ( + + + Copy Urn + + ), + }, ]; const onClick: MenuProps['onClick'] = (e) => { @@ -113,6 +126,9 @@ export const ActionsColumn = ({ ownershipType, setIsOpen, setOwnershipType, refe if (key === 'edit') { editOnClick(); } + else if( key === 'copy') { + onCopy(); + } }; const menuProps: MenuProps = { diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx index 9788d36af2c65a..d60e86b0af8ca4 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/CreateGlossaryEntityModal.tsx @@ -1,8 +1,9 @@ -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components/macro'; import { EditOutlined } from '@ant-design/icons'; import { message, Button, Input, Modal, Typography, Form, Collapse } from 'antd'; import DOMPurify from 'dompurify'; +import { useHistory } from 'react-router'; import { useCreateGlossaryTermMutation, useCreateGlossaryNodeMutation, @@ -16,6 +17,7 @@ import DescriptionModal from '../components/legacy/DescriptionModal'; import { validateCustomUrnId } from '../../../shared/textUtil'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getGlossaryRootToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; +import { getEntityPath } from '../containers/profile/utils'; const StyledItem = styled(Form.Item)` margin-bottom: 0; @@ -33,6 +35,7 @@ interface Props { entityType: EntityType; onClose: () => void; refetchData?: () => void; + isCloning?: boolean; } function CreateGlossaryEntityModal(props: Props) { @@ -43,15 +46,31 @@ function CreateGlossaryEntityModal(props: Props) { const entityRegistry = useEntityRegistry(); const [stagedId, setStagedId] = useState(undefined); const [stagedName, 
setStagedName] = useState(''); - const [selectedParentUrn, setSelectedParentUrn] = useState(entityData.urn); + const [selectedParentUrn, setSelectedParentUrn] = useState(props.isCloning ? '' : entityData.urn); const [documentation, setDocumentation] = useState(''); const [isDocumentationModalVisible, setIsDocumentationModalVisible] = useState(false); const [createButtonDisabled, setCreateButtonDisabled] = useState(true); const refetch = useRefetch(); + const history = useHistory(); const [createGlossaryTermMutation] = useCreateGlossaryTermMutation(); const [createGlossaryNodeMutation] = useCreateGlossaryNodeMutation(); + useEffect(() => { + if (props.isCloning && entityData.entityData) { + const { properties } = entityData.entityData; + + if (properties?.name) { + setStagedName(properties.name); + form.setFieldValue('name', properties.name); + } + + if (properties?.description) { + setDocumentation(properties.description); + } + } + }, [props.isCloning, entityData.entityData, form]); + function createGlossaryEntity() { const mutation = entityType === EntityType.GlossaryTerm ? createGlossaryTermMutation : createGlossaryNodeMutation; @@ -67,7 +86,7 @@ function CreateGlossaryEntityModal(props: Props) { }, }, }) - .then(() => { + .then((res) => { message.loading({ content: 'Updating...', duration: 2 }); setTimeout(() => { analytics.event({ @@ -82,12 +101,19 @@ function CreateGlossaryEntityModal(props: Props) { refetch(); if (isInGlossaryContext) { // either refresh this current glossary node or the root nodes or root terms - const nodeToUpdate = entityData?.urn || getGlossaryRootToUpdate(entityType); + const nodeToUpdate = selectedParentUrn || getGlossaryRootToUpdate(entityType); updateGlossarySidebar([nodeToUpdate], urnsToUpdate, setUrnsToUpdate); } if (refetchData) { refetchData(); } + if (props.isCloning) { + const redirectUrn = + entityType === EntityType.GlossaryTerm + ? 
res.data?.createGlossaryTerm + : res.data?.createGlossaryNode; + history.push(getEntityPath(entityType, redirectUrn, entityRegistry, false, false)); + } }, 2000); }) .catch((e) => { diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx index 5d4f9d9f875cfe..664a77a731d348 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/EntityDropdown.tsx @@ -9,6 +9,7 @@ import { LinkOutlined, MoreOutlined, PlusOutlined, + CopyOutlined, } from '@ant-design/icons'; import { Redirect } from 'react-router'; import { EntityType } from '../../../../types.generated'; @@ -32,6 +33,7 @@ export enum EntityMenuItems { ADD_TERM_GROUP, DELETE, MOVE, + CLONE, } export const MenuIcon = styled(MoreOutlined)<{ fontSize?: number }>` @@ -107,6 +109,7 @@ function EntityDropdown(props: Props) { const [isCreateTermModalVisible, setIsCreateTermModalVisible] = useState(false); const [isCreateNodeModalVisible, setIsCreateNodeModalVisible] = useState(false); + const [isCloneEntityModalVisible, setIsCloneEntityModalVisible] = useState(false); const [isDeprecationModalVisible, setIsDeprecationModalVisible] = useState(false); const [isMoveModalVisible, setIsMoveModalVisible] = useState(false); @@ -177,6 +180,7 @@ function EntityDropdown(props: Props) { )} {menuItems.has(EntityMenuItems.ADD_TERM) && ( setIsCreateTermModalVisible(true)} @@ -230,6 +234,17 @@ function EntityDropdown(props: Props) { )} + {menuItems.has(EntityMenuItems.CLONE) && ( + setIsCloneEntityModalVisible(true)} + > + +  Clone + + + )} } trigger={['click']} @@ -250,6 +265,14 @@ function EntityDropdown(props: Props) { refetchData={refetchForNodes} /> )} + {isCloneEntityModalVisible && ( + setIsCloneEntityModalVisible(false)} + refetchData={entityType === EntityType.GlossaryTerm ? 
refetchForTerms : refetchForNodes} + isCloning + /> + )} {isDeprecationModalVisible && ( void; }; +const StyledEditor = styled(Editor)` + border: 1px solid ${ANTD_GRAY[4.5]}; +`; + export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { const [batchUpdateDeprecation] = useBatchUpdateDeprecationMutation(); const [form] = Form.useForm(); @@ -64,10 +71,11 @@ export const UpdateDeprecationModal = ({ urns, onClose, refetch }: Props) => { } + width='40%' >
- - + + diff --git a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx index 1e4737135ed748..171a36b1cfbcc3 100644 --- a/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx +++ b/datahub-web-react/src/app/entity/shared/EntityDropdown/useDeleteEntity.tsx @@ -7,6 +7,7 @@ import analytics, { EventType } from '../../../analytics'; import { useGlossaryEntityData } from '../GlossaryEntityContext'; import { getParentNodeToUpdate, updateGlossarySidebar } from '../../../glossary/utils'; import { useHandleDeleteDomain } from './useHandleDeleteDomain'; +import { removeTermFromGlossaryNode } from '../../../glossary/cacheUtils'; /** * Performs the flow for deleting an entity of a given type. @@ -30,6 +31,7 @@ function useDeleteEntity( const maybeDeleteEntity = getDeleteEntityMutation(type)(); const deleteEntity = (maybeDeleteEntity && maybeDeleteEntity[0]) || undefined; + const client = maybeDeleteEntity?.[1].client; function handleDeleteEntity() { deleteEntity?.({ @@ -54,6 +56,10 @@ function useDeleteEntity( handleDeleteDomain(); } + if (client && entityData.type === EntityType.GlossaryTerm && entityData?.parentNodes?.nodes) { + removeTermFromGlossaryNode(client, entityData.parentNodes.nodes[0].urn, urn); + } + setTimeout( () => { setHasBeenDeleted(true); diff --git a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx index f00f16647c94b4..79ec142fd801d4 100644 --- a/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx +++ b/datahub-web-react/src/app/entity/shared/GlossaryEntityContext.tsx @@ -10,6 +10,8 @@ export interface GlossaryEntityContextType { // This will happen when you edit a name, move a term/group, create a new term/group, and delete a term/group urnsToUpdate: string[]; setUrnsToUpdate: (updatdUrns: string[]) => void; + isSidebarOpen: boolean; + 
setIsSidebarOpen: (isOpen: boolean) => void; } export const GlossaryEntityContext = React.createContext({ @@ -18,10 +20,27 @@ export const GlossaryEntityContext = React.createContext {}, urnsToUpdate: [], setUrnsToUpdate: () => {}, + isSidebarOpen: true, + setIsSidebarOpen: () => {}, }); export const useGlossaryEntityData = () => { - const { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate } = - useContext(GlossaryEntityContext); - return { isInGlossaryContext, entityData, setEntityData, urnsToUpdate, setUrnsToUpdate }; + const { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + } = useContext(GlossaryEntityContext); + return { + isInGlossaryContext, + entityData, + setEntityData, + urnsToUpdate, + setUrnsToUpdate, + isSidebarOpen, + setIsSidebarOpen, + }; }; diff --git a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx index f60a74247ebcc2..9ec2aab193aa0b 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx @@ -1,4 +1,4 @@ -import React from 'react'; +import React, { useState } from 'react'; import { InfoCircleOutlined } from '@ant-design/icons'; import { Divider, message, Modal, Popover, Tooltip, Typography } from 'antd'; import { blue } from '@ant-design/colors'; @@ -8,6 +8,8 @@ import { Deprecation } from '../../../../../types.generated'; import { getLocaleTimezone } from '../../../../shared/time/timeUtils'; import { ANTD_GRAY } from '../../constants'; import { useBatchUpdateDeprecationMutation } from '../../../../../graphql/mutations.generated'; +import { Editor } from '../../tabs/Documentation/components/editor/Editor'; +import StripMarkdownText, { removeMarkdown } from './StripMarkdownText'; const DeprecatedContainer = 
styled.div` height: 18px; @@ -38,11 +40,6 @@ const DeprecatedTitle = styled(Typography.Text)` font-weight: bold; `; -const DeprecatedSubTitle = styled(Typography.Text)` - display: block; - margin-bottom: 5px; -`; - const LastEvaluatedAtLabel = styled.div` padding: 0; margin: 0; @@ -70,15 +67,42 @@ const IconGroup = styled.div` } `; +const DescriptionContainer = styled.div` + position: relative; + display: flex; + flex-direction: column; + width: 100%; + height: 100%; + min-height: 22px; + margin-bottom: 14px; +`; +const StyledViewer = styled(Editor)` + padding-right: 8px; + display: block; + + .remirror-editor.ProseMirror { + padding: 0; + } +`; + +const ExpandedActions = styled.div` + height: 10px; +`; +const ReadLessText = styled(Typography.Link)` + margin-right: 4px; +`; type Props = { urn: string; deprecation: Deprecation; refetch?: () => void; showUndeprecate: boolean | null; }; +const ABBREVIATED_LIMIT = 80; export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: Props) => { const [batchUpdateDeprecationMutation] = useBatchUpdateDeprecationMutation(); + const [expanded, setExpanded] = useState(false); + const overLimit = deprecation?.note && removeMarkdown(deprecation?.note).length > 80; /** * Deprecation Decommission Timestamp */ @@ -131,14 +155,56 @@ export const DeprecationPill = ({ deprecation, urn, refetch, showUndeprecate }: return ( {deprecation?.note !== '' && Deprecation note} {isDividerNeeded && } - {deprecation?.note !== '' && {deprecation.note}} + + {expanded || !overLimit ? 
( + <> + { + deprecation?.note && deprecation?.note !== '' && + <> + + + {overLimit && ( + { + setExpanded(false); + }} + > + Read Less + + )} + + + } + + ) : ( + <> + + { + setExpanded(true); + }} + > + Read More + + + } + shouldWrap + > + {deprecation.note} + + + )} + {deprecation?.decommissionTime !== null && ( diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index 1daf2a4c59b70f..80fc2aa223fdf5 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -1,5 +1,5 @@ import React from 'react'; -import { Pagination, Typography } from 'antd'; +import { Pagination, Spin, Typography } from 'antd'; import { LoadingOutlined } from '@ant-design/icons'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; @@ -61,7 +61,7 @@ const LoadingContainer = styled.div` `; const StyledLoading = styled(LoadingOutlined)` - font-size: 36px; + font-size: 32px; color: ${ANTD_GRAY[7]}; padding-bottom: 18px; ]`; @@ -128,7 +128,7 @@ export const EmbeddedListSearchResults = ({ {loading && ( - + } /> )} {!loading && ( diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx index 58693eca8af0e8..25e044259f240e 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityTabs.tsx @@ -39,6 +39,7 @@ export const EntityTabs = ({ tabs, selectedTab }: Props) => { return ( Column Stats - + ); } diff --git 
a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx index bcce994c3f0f80..1b5c3d54009da8 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Documentation/components/LinkList.tsx @@ -1,14 +1,15 @@ -import React from 'react'; +import React, { useState } from 'react'; import { Link } from 'react-router-dom'; import styled from 'styled-components/macro'; -import { message, Button, List, Typography } from 'antd'; -import { LinkOutlined, DeleteOutlined } from '@ant-design/icons'; +import { message, Button, List, Typography, Modal, Form, Input } from 'antd'; +import { LinkOutlined, DeleteOutlined, EditOutlined } from '@ant-design/icons'; import { EntityType, InstitutionalMemoryMetadata } from '../../../../../../types.generated'; -import { useEntityData } from '../../../EntityContext'; +import { useEntityData, useMutationUrn } from '../../../EntityContext'; import { useEntityRegistry } from '../../../../../useEntityRegistry'; import { ANTD_GRAY } from '../../../constants'; import { formatDateString } from '../../../containers/profile/utils'; -import { useRemoveLinkMutation } from '../../../../../../graphql/mutations.generated'; +import { useAddLinkMutation, useRemoveLinkMutation } from '../../../../../../graphql/mutations.generated'; +import analytics, { EntityActionType, EventType } from '../../../../../analytics'; const LinkListItem = styled(List.Item)` border-radius: 5px; @@ -33,10 +34,15 @@ type LinkListProps = { }; export const LinkList = ({ refetch }: LinkListProps) => { - const { urn: entityUrn, entityData } = useEntityData(); + const [editModalVisble, setEditModalVisible] = useState(false); + const [linkDetails, setLinkDetails] = useState(undefined); + const { urn: entityUrn, entityData, entityType } = useEntityData(); const entityRegistry = 
useEntityRegistry(); const [removeLinkMutation] = useRemoveLinkMutation(); const links = entityData?.institutionalMemory?.elements || []; + const [form] = Form.useForm(); + const [addLinkMutation] = useAddLinkMutation(); + const mutationUrn = useMutationUrn(); const handleDeleteLink = async (metadata: InstitutionalMemoryMetadata) => { try { @@ -53,8 +59,98 @@ export const LinkList = ({ refetch }: LinkListProps) => { refetch?.(); }; + const handleEditLink = (metadata: InstitutionalMemoryMetadata) => { + form.setFieldsValue({ + url: metadata.url, + label: metadata.description, + }); + setLinkDetails(metadata); + setEditModalVisible(true); + }; + + const handleClose = () => { + form.resetFields(); + setEditModalVisible(false); + }; + + const handleEdit = async (formData: any) => { + if (!linkDetails) return; + try { + await removeLinkMutation({ + variables: { input: { linkUrl: linkDetails.url, resourceUrn: linkDetails.associatedUrn || entityUrn } }, + }); + await addLinkMutation({ + variables: { input: { linkUrl: formData.url, label: formData.label, resourceUrn: mutationUrn } }, + }); + + message.success({ content: 'Link Updated', duration: 2 }); + + analytics.event({ + type: EventType.EntityActionEvent, + entityType, + entityUrn: mutationUrn, + actionType: EntityActionType.UpdateLinks, + }); + + refetch?.(); + handleClose(); + } catch (e: unknown) { + message.destroy(); + + if (e instanceof Error) { + message.error({ content: `Error updating link: \n ${e.message || ''}`, duration: 2 }); + } + } + }; + return entityData ? ( <> + + Cancel + , + , + ]} + > + + + + + + + + + {links.length > 0 && ( { renderItem={(link) => ( handleDeleteLink(link)} type="text" shape="circle" danger> - - + <> + + + } > { manager.view.focus(); } }); + + // We need to track the modified content that we expect to be in the editor. + // This way, if the content prop changes, we can update the editor content to match + // if needed. 
However, we don't want to update the editor content on normal typing + // changes because that would cause the cursor to jump around unexpectedly. + const [modifiedContent, setModifiedContent] = useState(content); useEffect(() => { - if (readOnly && content) { + if (readOnly && content !== undefined) { + manager.store.commands.setContent(content); + } else if (!readOnly && content !== undefined && modifiedContent !== content) { + // If we get a content change that doesn't match what we're tracking to be in the editor, + // then we need to update the editor content to match the new props content. manager.store.commands.setContent(content); + setModifiedContent(content); } // eslint-disable-next-line react-hooks/exhaustive-deps }, [readOnly, content]); @@ -97,7 +108,14 @@ export const Editor = forwardRef((props: EditorProps, ref) => { - {onChange && } + {onChange && ( + { + setModifiedContent(md); + onChange(md); + }} + /> + )} )} diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts index ae8ab747f7cb6c..d4e3965cd66f5f 100644 --- a/datahub-web-react/src/app/entity/shared/types.ts +++ b/datahub-web-react/src/app/entity/shared/types.ts @@ -70,6 +70,7 @@ export type GenericEntityProperties = { type?: EntityType; name?: Maybe; properties?: Maybe<{ + name?: Maybe; description?: Maybe; qualifiedName?: Maybe; sourceUrl?: Maybe; diff --git a/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx b/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx index c01dd3a6359245..71bfbfcd49a16e 100644 --- a/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx +++ b/datahub-web-react/src/app/entity/user/UserInfoSideBar.tsx @@ -1,5 +1,5 @@ import { Divider, message, Space, Button, Typography, Tag } from 'antd'; -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import { EditOutlined, MailOutlined, PhoneOutlined, SlackOutlined } from '@ant-design/icons'; import { 
useUpdateCorpUserPropertiesMutation } from '../../../graphql/user.generated'; import { EntityRelationship, DataHubRole } from '../../../types.generated'; @@ -21,6 +21,7 @@ import { import EntityGroups from '../shared/EntityGroups'; import { mapRoleIcon } from '../../identity/user/UserUtils'; import { useUserContext } from '../../context/useUserContext'; +import { useBrowserTitle } from '../../shared/BrowserTabTitleContext'; const { Paragraph } = Typography; @@ -61,6 +62,22 @@ export default function UserInfoSideBar({ sideBarData, refetch }: Props) { const me = useUserContext(); const isProfileOwner = me?.user?.urn === urn; + const { updateTitle } = useBrowserTitle(); + + useEffect(()=>{ + // You can use the title and updateTitle function here + // For example, updating the title when the component mounts + if(name){ + updateTitle(`User | ${name}`); + } + // // Don't forget to clean up the title when the component unmounts + return () => { + if(name){ // added to condition for rerendering issue + updateTitle(''); + } + }; + }, [name, updateTitle]); + const getEditModalData = { urn, name, diff --git a/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx b/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx index a5262265fd23d7..4e424b776a8ce2 100644 --- a/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx +++ b/datahub-web-react/src/app/glossary/BusinessGlossaryPage.tsx @@ -20,6 +20,8 @@ import { import { OnboardingTour } from '../onboarding/OnboardingTour'; import { useGlossaryEntityData } from '../entity/shared/GlossaryEntityContext'; import { useUserContext } from '../context/useUserContext'; +import useToggleSidebar from './useToggleSidebar'; +import ToggleSidebarButton from '../search/ToggleSidebarButton'; export const HeaderWrapper = styled(TabToolbar)` padding: 15px 45px 10px 24px; @@ -38,6 +40,12 @@ const MainContentWrapper = styled.div` flex-direction: column; `; +const TitleContainer = styled.div` + display: flex; + align-items: 
center; + gap: 12px; +`; + export const MAX_BROWSER_WIDTH = 500; export const MIN_BROWSWER_WIDTH = 200; @@ -56,6 +64,7 @@ function BusinessGlossaryPage() { } = useGetRootGlossaryNodesQuery(); const entityRegistry = useEntityRegistry(); const { setEntityData } = useGlossaryEntityData(); + const { isOpen: isSidebarOpen, toggleSidebar } = useToggleSidebar(); useEffect(() => { setEntityData(null); @@ -94,7 +103,12 @@ function BusinessGlossaryPage() { )} - Business Glossary + + + + Business Glossary + +