getNeedCheckinFiles()
+ {
+ return _needCheckinFiles;
+ }
+}
\ No newline at end of file
diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java
index fe04c3629fe582..ef33bde8f61d39 100644
--- a/datahub-frontend/app/auth/AuthModule.java
+++ b/datahub-frontend/app/auth/AuthModule.java
@@ -1,5 +1,9 @@
package auth;
+import static auth.AuthUtils.*;
+import static auth.sso.oidc.OidcConfigs.*;
+import static utils.ConfigUtil.*;
+
import auth.sso.SsoConfigs;
import auth.sso.SsoManager;
import auth.sso.oidc.OidcConfigs;
@@ -18,12 +22,10 @@
import com.linkedin.util.Configuration;
import config.ConfigurationProvider;
import controllers.SsoCallbackController;
-
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
@@ -42,205 +44,227 @@
import play.cache.SyncCacheApi;
import utils.ConfigUtil;
-import static auth.AuthUtils.*;
-import static auth.sso.oidc.OidcConfigs.*;
-import static utils.ConfigUtil.*;
+/** Responsible for configuring, validating, and providing authentication related components. */
+public class AuthModule extends AbstractModule {
+ /**
+ * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration
+ * value provides a stable encryption base from which to derive the encryption key.
+ *
+ * We hash this value (SHA256), then take the first 16 bytes as the AES key.
+ */
+ private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key";
-/**
- * Responsible for configuring, validating, and providing authentication related components.
- */
-public class AuthModule extends AbstractModule {
+ private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider";
+ private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval";
+ private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries";
+
+ private final com.typesafe.config.Config _configs;
+
+ public AuthModule(final Environment environment, final com.typesafe.config.Config configs) {
+ _configs = configs;
+ }
+ @Override
+ protected void configure() {
/**
- * Pac4j Stores Session State in a browser-side cookie in encrypted fashion. This configuration
- * value provides a stable encryption base from which to derive the encryption key.
- *
- * We hash this value (SHA256), then take the first 16 bytes as the AES key.
+ * In Pac4J, you are given the option to store the profiles of authenticated users in either (i)
+ * PlayCacheSessionStore, which saves your data in the Play cache, or (ii) PlayCookieSessionStore,
+ * which saves your data in the Play session cookie. However, there is a problem
+ * (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j
+ * profile in a cookie: whenever the profile returned by Pac4j is greater than 4096 characters,
+ * the response will be rejected by the browser. Default to PlayCookieSessionStore so that the
+ * datahub-frontend container remains a stateless service.
*/
- private static final String PAC4J_AES_KEY_BASE_CONF = "play.http.secret.key";
- private static final String PAC4J_SESSIONSTORE_PROVIDER_CONF = "pac4j.sessionStore.provider";
- private static final String ENTITY_CLIENT_RETRY_INTERVAL = "entityClient.retryInterval";
- private static final String ENTITY_CLIENT_NUM_RETRIES = "entityClient.numRetries";
+ String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF);
- private final com.typesafe.config.Config _configs;
-
- public AuthModule(final Environment environment, final com.typesafe.config.Config configs) {
- _configs = configs;
+ if (sessionStoreProvider.equals("PlayCacheSessionStore")) {
+ final PlayCacheSessionStore playCacheSessionStore =
+ new PlayCacheSessionStore(getProvider(SyncCacheApi.class));
+ bind(SessionStore.class).toInstance(playCacheSessionStore);
+ bind(PlaySessionStore.class).toInstance(playCacheSessionStore);
+ } else {
+ PlayCookieSessionStore playCacheCookieStore;
+ try {
+ // To generate a valid encryption key from an input value, we first
+ // hash the input to generate a fixed-length hex string. Then, we slice
+ // the first 16 characters of that string, because the AES key must have
+ // a specific length (16 bytes here).
+ final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF);
+ final String aesKeyHash =
+ DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8));
+ final String aesEncryptionKey = aesKeyHash.substring(0, 16);
+ playCacheCookieStore =
+ new PlayCookieSessionStore(new ShiroAesDataEncrypter(aesEncryptionKey.getBytes()));
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e);
+ }
+ bind(SessionStore.class).toInstance(playCacheCookieStore);
+ bind(PlaySessionStore.class).toInstance(playCacheCookieStore);
}
- @Override
- protected void configure() {
- /**
- * In Pac4J, you are given the option to store the profiles of authenticated users in either
- * (i) PlayCacheSessionStore - saves your data in the Play cache or
- * (ii) PlayCookieSessionStore saves your data in the Play session cookie
- * However there is problem (https://github.com/datahub-project/datahub/issues/4448) observed when storing the Pac4j profile in cookie.
- * Whenever the profile returned by Pac4j is greater than 4096 characters, the response will be rejected by the browser.
- * Default to PlayCacheCookieStore so that datahub-frontend container remains as a stateless service
- */
- String sessionStoreProvider = _configs.getString(PAC4J_SESSIONSTORE_PROVIDER_CONF);
-
- if (sessionStoreProvider.equals("PlayCacheSessionStore")) {
- final PlayCacheSessionStore playCacheSessionStore = new PlayCacheSessionStore(getProvider(SyncCacheApi.class));
- bind(SessionStore.class).toInstance(playCacheSessionStore);
- bind(PlaySessionStore.class).toInstance(playCacheSessionStore);
- } else {
- PlayCookieSessionStore playCacheCookieStore;
- try {
- // To generate a valid encryption key from an input value, we first
- // hash the input to generate a fixed-length string. Then, we convert
- // it to hex and slice the first 16 bytes, because AES key length must strictly
- // have a specific length.
- final String aesKeyBase = _configs.getString(PAC4J_AES_KEY_BASE_CONF);
- final String aesKeyHash = DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8));
- final String aesEncryptionKey = aesKeyHash.substring(0, 16);
- playCacheCookieStore = new PlayCookieSessionStore(
- new ShiroAesDataEncrypter(aesEncryptionKey.getBytes()));
- } catch (Exception e) {
- throw new RuntimeException("Failed to instantiate Pac4j cookie session store!", e);
- }
- bind(SessionStore.class).toInstance(playCacheCookieStore);
- bind(PlaySessionStore.class).toInstance(playCacheCookieStore);
- }
-
- try {
- bind(SsoCallbackController.class).toConstructor(SsoCallbackController.class.getConstructor(
- SsoManager.class,
- Authentication.class,
- SystemEntityClient.class,
- AuthServiceClient.class,
- com.typesafe.config.Config.class));
- } catch (NoSuchMethodException | SecurityException e) {
- throw new RuntimeException("Failed to bind to SsoCallbackController. Cannot find constructor", e);
- }
- // logout
- final LogoutController logoutController = new LogoutController();
- logoutController.setDefaultUrl("/");
- bind(LogoutController.class).toInstance(logoutController);
+ try {
+ bind(SsoCallbackController.class)
+ .toConstructor(
+ SsoCallbackController.class.getConstructor(
+ SsoManager.class,
+ Authentication.class,
+ SystemEntityClient.class,
+ AuthServiceClient.class,
+ com.typesafe.config.Config.class));
+ } catch (NoSuchMethodException | SecurityException e) {
+ throw new RuntimeException(
+ "Failed to bind to SsoCallbackController. Cannot find constructor", e);
}
+ // logout
+ final LogoutController logoutController = new LogoutController();
+ logoutController.setDefaultUrl("/");
+ bind(LogoutController.class).toInstance(logoutController);
+ }
- @Provides @Singleton
- protected Config provideConfig(SsoManager ssoManager) {
- if (ssoManager.isSsoEnabled()) {
- final Clients clients = new Clients();
- final List<Client> clientList = new ArrayList<>();
- clientList.add(ssoManager.getSsoProvider().client());
- clients.setClients(clientList);
- final Config config = new Config(clients);
- config.setHttpActionAdapter(new PlayHttpActionAdapter());
- return config;
- }
- return new Config();
+ @Provides
+ @Singleton
+ protected Config provideConfig(SsoManager ssoManager) {
+ if (ssoManager.isSsoEnabled()) {
+ final Clients clients = new Clients();
+ final List<Client> clientList = new ArrayList<>();
+ clientList.add(ssoManager.getSsoProvider().client());
+ clients.setClients(clientList);
+ final Config config = new Config(clients);
+ config.setHttpActionAdapter(new PlayHttpActionAdapter());
+ return config;
}
+ return new Config();
+ }
- @Provides @Singleton
- protected SsoManager provideSsoManager() {
- SsoManager manager = new SsoManager();
- // Seed the SSO manager with a default SSO provider.
- if (isSsoEnabled(_configs)) {
- SsoConfigs ssoConfigs = new SsoConfigs(_configs);
- if (ssoConfigs.isOidcEnabled()) {
- // Register OIDC Provider, add to list of managers.
- OidcConfigs oidcConfigs = new OidcConfigs(_configs);
- OidcProvider oidcProvider = new OidcProvider(oidcConfigs);
- // Set the default SSO provider to this OIDC client.
- manager.setSsoProvider(oidcProvider);
- }
- }
- return manager;
+ @Provides
+ @Singleton
+ protected SsoManager provideSsoManager() {
+ SsoManager manager = new SsoManager();
+ // Seed the SSO manager with a default SSO provider.
+ if (isSsoEnabled(_configs)) {
+ SsoConfigs ssoConfigs = new SsoConfigs(_configs);
+ if (ssoConfigs.isOidcEnabled()) {
+ // Register OIDC Provider, add to list of managers.
+ OidcConfigs oidcConfigs = new OidcConfigs(_configs);
+ OidcProvider oidcProvider = new OidcProvider(oidcConfigs);
+ // Set the default SSO provider to this OIDC client.
+ manager.setSsoProvider(oidcProvider);
+ }
}
+ return manager;
+ }
- @Provides
- @Singleton
- protected Authentication provideSystemAuthentication() {
- // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service.
- String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH);
- String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH);
- final Actor systemActor =
- new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported.
- return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret),
- Collections.emptyMap());
- }
+ @Provides
+ @Singleton
+ protected Authentication provideSystemAuthentication() {
+ // Returns an instance of Authentication used to authenticate system initiated calls to Metadata
+ // Service.
+ String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH);
+ String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH);
+ final Actor systemActor =
+ new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported.
+ return new Authentication(
+ systemActor,
+ String.format("Basic %s:%s", systemClientId, systemSecret),
+ Collections.emptyMap());
+ }
- @Provides
- @Singleton
- protected ConfigurationProvider provideConfigurationProvider() {
- AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(ConfigurationProvider.class);
- return context.getBean(ConfigurationProvider.class);
- }
+ @Provides
+ @Singleton
+ protected ConfigurationProvider provideConfigurationProvider() {
+ AnnotationConfigApplicationContext context =
+ new AnnotationConfigApplicationContext(ConfigurationProvider.class);
+ return context.getBean(ConfigurationProvider.class);
+ }
- @Provides
- @Singleton
- protected SystemEntityClient provideEntityClient(final Authentication systemAuthentication,
- final ConfigurationProvider configurationProvider) {
- return new SystemRestliEntityClient(buildRestliClient(),
- new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)),
- _configs.getInt(ENTITY_CLIENT_NUM_RETRIES), systemAuthentication,
- configurationProvider.getCache().getClient().getEntityClient());
- }
+ @Provides
+ @Singleton
+ protected SystemEntityClient provideEntityClient(
+ final Authentication systemAuthentication,
+ final ConfigurationProvider configurationProvider) {
+ return new SystemRestliEntityClient(
+ buildRestliClient(),
+ new ExponentialBackoff(_configs.getInt(ENTITY_CLIENT_RETRY_INTERVAL)),
+ _configs.getInt(ENTITY_CLIENT_NUM_RETRIES),
+ systemAuthentication,
+ configurationProvider.getCache().getClient().getEntityClient());
+ }
- @Provides
- @Singleton
- protected CloseableHttpClient provideHttpClient() {
- return HttpClients.createDefault();
- }
+ @Provides
+ @Singleton
+ protected CloseableHttpClient provideHttpClient() {
+ return HttpClients.createDefault();
+ }
- @Provides
- @Singleton
- protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) {
- // Init a GMS auth client
- final String metadataServiceHost =
- _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH)
- : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST);
-
- final int metadataServicePort =
- _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH)
- : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT));
-
- final Boolean metadataServiceUseSsl =
- _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? _configs.getBoolean(
- METADATA_SERVICE_USE_SSL_CONFIG_PATH)
- : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL));
-
- return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl,
- systemAuthentication, httpClient);
- }
+ @Provides
+ @Singleton
+ protected AuthServiceClient provideAuthClient(
+ Authentication systemAuthentication, CloseableHttpClient httpClient) {
+ // Init a GMS auth client
+ final String metadataServiceHost =
+ _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH)
+ ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH)
+ : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST);
+
+ final int metadataServicePort =
+ _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH)
+ ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH)
+ : Integer.parseInt(
+ Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT));
- private com.linkedin.restli.client.Client buildRestliClient() {
- final String metadataServiceHost = utils.ConfigUtil.getString(
+ final Boolean metadataServiceUseSsl =
+ _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH)
+ ? _configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH)
+ : Boolean.parseBoolean(
+ Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL));
+
+ return new AuthServiceClient(
+ metadataServiceHost,
+ metadataServicePort,
+ metadataServiceUseSsl,
+ systemAuthentication,
+ httpClient);
+ }
+
+ private com.linkedin.restli.client.Client buildRestliClient() {
+ final String metadataServiceHost =
+ utils.ConfigUtil.getString(
_configs,
METADATA_SERVICE_HOST_CONFIG_PATH,
utils.ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
- final int metadataServicePort = utils.ConfigUtil.getInt(
+ final int metadataServicePort =
+ utils.ConfigUtil.getInt(
_configs,
utils.ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
utils.ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
- final boolean metadataServiceUseSsl = utils.ConfigUtil.getBoolean(
+ final boolean metadataServiceUseSsl =
+ utils.ConfigUtil.getBoolean(
_configs,
utils.ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
- ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL
- );
- final String metadataServiceSslProtocol = utils.ConfigUtil.getString(
+ ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL);
+ final String metadataServiceSslProtocol =
+ utils.ConfigUtil.getString(
_configs,
utils.ConfigUtil.METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH,
- ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL
- );
- return DefaultRestliClientFactory.getRestLiClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, metadataServiceSslProtocol);
- }
+ ConfigUtil.DEFAULT_METADATA_SERVICE_SSL_PROTOCOL);
+ return DefaultRestliClientFactory.getRestLiClient(
+ metadataServiceHost,
+ metadataServicePort,
+ metadataServiceUseSsl,
+ metadataServiceSslProtocol);
+ }
- protected boolean isSsoEnabled(com.typesafe.config.Config configs) {
- // If OIDC is enabled, we infer SSO to be enabled.
- return configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
- && Boolean.TRUE.equals(
- Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
- }
+ protected boolean isSsoEnabled(com.typesafe.config.Config configs) {
+ // If OIDC is enabled, we infer SSO to be enabled.
+ return configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
+ && Boolean.TRUE.equals(Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
+ }
- protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) {
- // If OIDC is enabled, we infer SSO to be enabled.
- return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
- && Boolean.TRUE.equals(
+ protected boolean isMetadataServiceAuthEnabled(com.typesafe.config.Config configs) {
+ // Metadata Service auth is enabled when the corresponding config flag is present and true.
+ return configs.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
+ && Boolean.TRUE.equals(
Boolean.parseBoolean(configs.getString(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)));
- }
+ }
}
-
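For reference, a minimal sketch of how the configuration keys read by AuthModule above (play.http.secret.key and pac4j.sessionStore.provider) resolve at runtime, using the Typesafe Config and commons-codec APIs the module already imports; the sample values are illustrative only, not defaults introduced by this change.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import java.nio.charset.StandardCharsets;
import org.apache.commons.codec.digest.DigestUtils;

public class AuthModuleConfigSketch {
  public static void main(String[] args) {
    // Sample values only; real deployments supply these via application.conf.
    Config configs =
        ConfigFactory.parseString(
            "play.http.secret.key = \"change-me\"\n"
                + "pac4j.sessionStore.provider = \"PlayCookieSessionStore\"");

    // Same derivation AuthModule.configure() performs: SHA-256 hash of the
    // secret, hex-encoded, truncated to the first 16 characters for the AES key.
    String aesKeyBase = configs.getString("play.http.secret.key");
    String aesKey =
        DigestUtils.sha256Hex(aesKeyBase.getBytes(StandardCharsets.UTF_8)).substring(0, 16);
    System.out.println("Derived AES key material: " + aesKey);
    System.out.println("Session store: " + configs.getString("pac4j.sessionStore.provider"));
  }
}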
diff --git a/datahub-frontend/app/auth/AuthUtils.java b/datahub-frontend/app/auth/AuthUtils.java
index 386eee725c83d0..283a2164584b95 100644
--- a/datahub-frontend/app/auth/AuthUtils.java
+++ b/datahub-frontend/app/auth/AuthUtils.java
@@ -1,137 +1,136 @@
package auth;
import com.linkedin.common.urn.CorpuserUrn;
-import lombok.extern.slf4j.Slf4j;
-import play.mvc.Http;
-
-import javax.annotation.Nonnull;
import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.HashMap;
import java.util.Map;
+import javax.annotation.Nonnull;
+import lombok.extern.slf4j.Slf4j;
+import play.mvc.Http;
@Slf4j
public class AuthUtils {
- /**
- * The config path that determines whether Metadata Service Authentication is enabled.
- *
- * When enabled, the frontend server will proxy requests to the Metadata Service without requiring them to have a valid
- * frontend-issued Session Cookie. This effectively means delegating the act of authentication to the Metadata Service. It
- * is critical that if Metadata Service authentication is enabled at the frontend service layer, it is also enabled in the
- * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata itself.
- *
- * When disabled, the frontend server will require that all requests have a valid Session Cookie associated with them. Otherwise,
- * requests will be denied with an Unauthorized error.
- */
- public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH = "metadataService.auth.enabled";
-
- /**
- * The attribute inside session cookie representing a GMS-issued access token
- */
- public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token";
-
- /**
- * An ID used to identify system callers that are internal to DataHub. Provided via configuration.
- */
- public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId";
-
- /**
- * An Secret used to authenticate system callers that are internal to DataHub. Provided via configuration.
- */
- public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret";
-
- /**
- * Cookie name for redirect url that is manually separated from the session to reduce size
- */
- public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL";
-
- public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub");
-
- public static final String LOGIN_ROUTE = "/login";
- public static final String USER_NAME = "username";
- public static final String PASSWORD = "password";
- public static final String ACTOR = "actor";
- public static final String ACCESS_TOKEN = "token";
- public static final String FULL_NAME = "fullName";
- public static final String EMAIL = "email";
- public static final String TITLE = "title";
- public static final String INVITE_TOKEN = "inviteToken";
- public static final String RESET_TOKEN = "resetToken";
-
- /**
- * Determines whether the inbound request should be forward to downstream Metadata Service. Today, this simply
- * checks for the presence of an "Authorization" header or the presence of a valid session cookie issued
- * by the frontend.
- *
- * Note that this method DOES NOT actually verify the authentication token of an inbound request. That will
- * be handled by the downstream Metadata Service. Until then, the request should be treated as UNAUTHENTICATED.
- *
- * Returns true if the request is eligible to be forwarded to GMS, false otherwise.
- */
- public static boolean isEligibleForForwarding(Http.Request req) {
- return hasValidSessionCookie(req) || hasAuthHeader(req);
+ /**
+ * The config path that determines whether Metadata Service Authentication is enabled.
+ *
+ * When enabled, the frontend server will proxy requests to the Metadata Service without
+ * requiring them to have a valid frontend-issued Session Cookie. This effectively means
+ * delegating the act of authentication to the Metadata Service. It is critical that if Metadata
+ * Service authentication is enabled at the frontend service layer, it is also enabled in the
+ * Metadata Service itself. Otherwise, unauthenticated traffic may reach the Metadata Service.
+ *
+ * When disabled, the frontend server will require that all requests have a valid Session
+ * Cookie associated with them. Otherwise, requests will be denied with an Unauthorized error.
+ */
+ public static final String METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH =
+ "metadataService.auth.enabled";
+
+ /** The attribute inside session cookie representing a GMS-issued access token */
+ public static final String SESSION_COOKIE_GMS_TOKEN_NAME = "token";
+
+ /**
+ * An ID used to identify system callers that are internal to DataHub. Provided via configuration.
+ */
+ public static final String SYSTEM_CLIENT_ID_CONFIG_PATH = "systemClientId";
+
+ /**
+ * A secret used to authenticate system callers that are internal to DataHub. Provided via
+ * configuration.
+ */
+ public static final String SYSTEM_CLIENT_SECRET_CONFIG_PATH = "systemClientSecret";
+
+ /** Cookie name for redirect url that is manually separated from the session to reduce size */
+ public static final String REDIRECT_URL_COOKIE_NAME = "REDIRECT_URL";
+
+ public static final CorpuserUrn DEFAULT_ACTOR_URN = new CorpuserUrn("datahub");
+
+ public static final String LOGIN_ROUTE = "/login";
+ public static final String USER_NAME = "username";
+ public static final String PASSWORD = "password";
+ public static final String ACTOR = "actor";
+ public static final String ACCESS_TOKEN = "token";
+ public static final String FULL_NAME = "fullName";
+ public static final String EMAIL = "email";
+ public static final String TITLE = "title";
+ public static final String INVITE_TOKEN = "inviteToken";
+ public static final String RESET_TOKEN = "resetToken";
+
+ /**
+ * Determines whether the inbound request should be forwarded to the downstream Metadata Service.
+ * Today, this simply checks for the presence of an "Authorization" header or the presence of a
+ * valid session cookie issued by the frontend.
+ *
+ * Note that this method DOES NOT actually verify the authentication token of an inbound
+ * request. That will be handled by the downstream Metadata Service. Until then, the request
+ * should be treated as UNAUTHENTICATED.
+ *
+ * Returns true if the request is eligible to be forwarded to GMS, false otherwise.
+ */
+ public static boolean isEligibleForForwarding(Http.Request req) {
+ return hasValidSessionCookie(req) || hasAuthHeader(req);
+ }
+
+ /**
+ * Returns true if a request has a valid session cookie issued by the frontend server. Note that
+ * this DOES NOT verify whether the token within the session cookie will be accepted by the
+ * downstream GMS service.
+ *
+ * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not,
+ * as well as their agreement to determine authentication status.
+ */
+ public static boolean hasValidSessionCookie(final Http.Request req) {
+ Map<String, String> sessionCookie = req.session().data();
+ return sessionCookie.containsKey(ACCESS_TOKEN)
+ && sessionCookie.containsKey(ACTOR)
+ && req.getCookie(ACTOR).isPresent()
+ && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value());
+ }
+
+ /** Returns true if a request includes the Authorization header, false otherwise */
+ public static boolean hasAuthHeader(final Http.Request req) {
+ return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION);
+ }
+
+ /**
+ * Creates a client authentication cookie (actor cookie) with a specified TTL in hours.
+ *
+ * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub"
+ * @param ttlInHours the number of hours until the actor cookie expires after being set
+ */
+ public static Http.Cookie createActorCookie(
+ @Nonnull final String actorUrn,
+ @Nonnull final Integer ttlInHours,
+ @Nonnull final String sameSite,
+ final boolean isSecure) {
+ return Http.Cookie.builder(ACTOR, actorUrn)
+ .withHttpOnly(false)
+ .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS))
+ .withSameSite(convertSameSiteValue(sameSite))
+ .withSecure(isSecure)
+ .build();
+ }
+
+ public static Map<String, String> createSessionMap(
+ final String userUrnStr, final String accessToken) {
+ final Map<String, String> sessionAttributes = new HashMap<>();
+ sessionAttributes.put(ACTOR, userUrnStr);
+ sessionAttributes.put(ACCESS_TOKEN, accessToken);
+ return sessionAttributes;
+ }
+
+ private AuthUtils() {}
+
+ private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) {
+ try {
+ return Http.Cookie.SameSite.valueOf(sameSiteValue);
+ } catch (IllegalArgumentException e) {
+ log.warn(
+ String.format(
+ "Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue),
+ e);
+ return Http.Cookie.SameSite.LAX;
}
-
- /**
- * Returns true if a request has a valid session cookie issued by the frontend server.
- * Note that this DOES NOT verify whether the token within the session cookie will be accepted
- * by the downstream GMS service.
- *
- * Note that we depend on the presence of 2 cookies, one accessible to the browser and one not,
- * as well as their agreement to determine authentication status.
- */
- public static boolean hasValidSessionCookie(final Http.Request req) {
- Map<String, String> sessionCookie = req.session().data();
- return sessionCookie.containsKey(ACCESS_TOKEN)
- && sessionCookie.containsKey(ACTOR)
- && req.getCookie(ACTOR).isPresent()
- && req.session().data().get(ACTOR).equals(req.getCookie(ACTOR).get().value());
- }
-
- /**
- * Returns true if a request includes the Authorization header, false otherwise
- */
- public static boolean hasAuthHeader(final Http.Request req) {
- return req.getHeaders().contains(Http.HeaderNames.AUTHORIZATION);
- }
-
- /**
- * Creates a client authentication cookie (actor cookie) with a specified TTL in hours.
- *
- * @param actorUrn the urn of the authenticated actor, e.g. "urn:li:corpuser:datahub"
- * @param ttlInHours the number of hours until the actor cookie expires after being set
- */
- public static Http.Cookie createActorCookie(
- @Nonnull final String actorUrn,
- @Nonnull final Integer ttlInHours,
- @Nonnull final String sameSite,
- final boolean isSecure
- ) {
- return Http.Cookie.builder(ACTOR, actorUrn)
- .withHttpOnly(false)
- .withMaxAge(Duration.of(ttlInHours, ChronoUnit.HOURS))
- .withSameSite(convertSameSiteValue(sameSite))
- .withSecure(isSecure)
- .build();
- }
-
- public static Map<String, String> createSessionMap(final String userUrnStr, final String accessToken) {
- final Map<String, String> sessionAttributes = new HashMap<>();
- sessionAttributes.put(ACTOR, userUrnStr);
- sessionAttributes.put(ACCESS_TOKEN, accessToken);
- return sessionAttributes;
- }
-
- private AuthUtils() { }
-
- private static Http.Cookie.SameSite convertSameSiteValue(@Nonnull final String sameSiteValue) {
- try {
- return Http.Cookie.SameSite.valueOf(sameSiteValue);
- } catch (IllegalArgumentException e) {
- log.warn(String.format("Invalid AUTH_COOKIE_SAME_SITE value: %s. Using LAX instead.", sameSiteValue), e);
- return Http.Cookie.SameSite.LAX;
- }
- }
-
+ }
}
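As a companion to the cookie helpers above, a small self-contained sketch that builds an actor cookie the same way createActorCookie does and applies the same dual-cookie agreement check that hasValidSessionCookie performs; the plain map and placeholder token stand in for real Play request state.

import java.time.Duration;
import java.time.temporal.ChronoUnit;
import java.util.HashMap;
import java.util.Map;
import play.mvc.Http;

public class AuthUtilsSketch {
  public static void main(String[] args) {
    // Build an actor cookie the same way AuthUtils.createActorCookie does.
    Http.Cookie actorCookie =
        Http.Cookie.builder("actor", "urn:li:corpuser:datahub")
            .withHttpOnly(false)
            .withMaxAge(Duration.of(24, ChronoUnit.HOURS))
            .withSameSite(Http.Cookie.SameSite.LAX)
            .withSecure(false)
            .build();

    // The session map AuthUtils.createSessionMap would produce for the same login.
    Map<String, String> session = new HashMap<>();
    session.put("actor", "urn:li:corpuser:datahub");
    session.put("token", "<gms-access-token>"); // Placeholder token value.

    // hasValidSessionCookie requires both entries to exist and the session's
    // "actor" value to agree with the browser-visible actor cookie.
    boolean valid =
        session.containsKey("token")
            && session.containsKey("actor")
            && session.get("actor").equals(actorCookie.value());
    System.out.println("Session considered valid: " + valid);
  }
}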
diff --git a/datahub-frontend/app/auth/Authenticator.java b/datahub-frontend/app/auth/Authenticator.java
index ae847b318dce28..8536fc7e016956 100644
--- a/datahub-frontend/app/auth/Authenticator.java
+++ b/datahub-frontend/app/auth/Authenticator.java
@@ -1,48 +1,49 @@
package auth;
+import static auth.AuthUtils.*;
+
import com.typesafe.config.Config;
import java.util.Optional;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
import javax.inject.Inject;
import play.mvc.Http;
import play.mvc.Result;
import play.mvc.Security;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-
-import static auth.AuthUtils.*;
-
-
/**
* Implementation of base Play Authentication used to determine if a request to a route should be
* authenticated.
*/
public class Authenticator extends Security.Authenticator {
- private final boolean metadataServiceAuthEnabled;
+ private final boolean metadataServiceAuthEnabled;
- @Inject
- public Authenticator(@Nonnull Config config) {
- this.metadataServiceAuthEnabled = config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
+ @Inject
+ public Authenticator(@Nonnull Config config) {
+ this.metadataServiceAuthEnabled =
+ config.hasPath(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH)
&& config.getBoolean(METADATA_SERVICE_AUTH_ENABLED_CONFIG_PATH);
+ }
+
+ @Override
+ public Optional<String> getUsername(@Nonnull Http.Request req) {
+ if (this.metadataServiceAuthEnabled) {
+ // If Metadata Service auth is enabled, we only want to verify presence of the
+ // "Authorization" header OR the presence of a frontend generated session cookie.
+ // At this time, the actor is still considered to be unauthenticated.
+ return Optional.ofNullable(
+ AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null);
+ } else {
+ // If Metadata Service auth is not enabled, verify the presence of a valid session cookie.
+ return Optional.ofNullable(
+ AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null);
}
+ }
- @Override
- public Optional<String> getUsername(@Nonnull Http.Request req) {
- if (this.metadataServiceAuthEnabled) {
- // If Metadata Service auth is enabled, we only want to verify presence of the
- // "Authorization" header OR the presence of a frontend generated session cookie.
- // At this time, the actor is still considered to be unauthenicated.
- return Optional.ofNullable(AuthUtils.isEligibleForForwarding(req) ? "urn:li:corpuser:UNKNOWN" : null);
- } else {
- // If Metadata Service auth is not enabled, verify the presence of a valid session cookie.
- return Optional.ofNullable(AuthUtils.hasValidSessionCookie(req) ? req.session().data().get(ACTOR) : null);
- }
- }
-
- @Override
- @Nonnull
- public Result onUnauthorized(@Nullable Http.Request req) {
- return unauthorized();
- }
+ @Override
+ @Nonnull
+ public Result onUnauthorized(@Nullable Http.Request req) {
+ return unauthorized();
+ }
}
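A hypothetical sketch of how a Play controller action could opt into the Authenticator above via Play's @Security.Authenticated annotation; the controller class and route are illustrative and not part of this change.

import auth.Authenticator;
import play.mvc.Controller;
import play.mvc.Http;
import play.mvc.Result;
import play.mvc.Security;

public class ExampleProtectedController extends Controller {
  // Requests to this action first pass through Authenticator.getUsername();
  // when it returns empty, Authenticator.onUnauthorized() produces the 401.
  @Security.Authenticated(Authenticator.class)
  public Result index(Http.Request request) {
    return ok("Authenticated actor: " + request.session().data().get("actor"));
  }
}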
diff --git a/datahub-frontend/app/auth/ConfigUtil.java b/datahub-frontend/app/auth/ConfigUtil.java
index e0999ee00be386..9fbed91ce6a10c 100644
--- a/datahub-frontend/app/auth/ConfigUtil.java
+++ b/datahub-frontend/app/auth/ConfigUtil.java
@@ -3,20 +3,20 @@
import com.typesafe.config.Config;
import java.util.Optional;
-
public class ConfigUtil {
- private ConfigUtil() {
- }
+ private ConfigUtil() {}
public static String getRequired(final Config configs, final String path) {
if (!configs.hasPath(path)) {
- throw new IllegalArgumentException(String.format("Missing required config with path %s", path));
+ throw new IllegalArgumentException(
+ String.format("Missing required config with path %s", path));
}
return configs.getString(path);
}
- public static String getOptional(final Config configs, final String path, final String defaultVal) {
+ public static String getOptional(
+ final Config configs, final String path, final String defaultVal) {
if (!configs.hasPath(path)) {
return defaultVal;
}
diff --git a/datahub-frontend/app/auth/CookieConfigs.java b/datahub-frontend/app/auth/CookieConfigs.java
index b6da9b7a1833c4..63b2ce61aaf9bb 100644
--- a/datahub-frontend/app/auth/CookieConfigs.java
+++ b/datahub-frontend/app/auth/CookieConfigs.java
@@ -1,6 +1,5 @@
package auth;
-
import com.typesafe.config.Config;
public class CookieConfigs {
@@ -16,12 +15,18 @@ public class CookieConfigs {
private final boolean _authCookieSecure;
public CookieConfigs(final Config configs) {
- _ttlInHours = configs.hasPath(SESSION_TTL_CONFIG_PATH) ? configs.getInt(SESSION_TTL_CONFIG_PATH)
- : DEFAULT_SESSION_TTL_HOURS;
- _authCookieSameSite = configs.hasPath(AUTH_COOKIE_SAME_SITE) ? configs.getString(AUTH_COOKIE_SAME_SITE)
- : DEFAULT_AUTH_COOKIE_SAME_SITE;
- _authCookieSecure = configs.hasPath(AUTH_COOKIE_SECURE) ? configs.getBoolean(AUTH_COOKIE_SECURE)
- : DEFAULT_AUTH_COOKIE_SECURE;
+ _ttlInHours =
+ configs.hasPath(SESSION_TTL_CONFIG_PATH)
+ ? configs.getInt(SESSION_TTL_CONFIG_PATH)
+ : DEFAULT_SESSION_TTL_HOURS;
+ _authCookieSameSite =
+ configs.hasPath(AUTH_COOKIE_SAME_SITE)
+ ? configs.getString(AUTH_COOKIE_SAME_SITE)
+ : DEFAULT_AUTH_COOKIE_SAME_SITE;
+ _authCookieSecure =
+ configs.hasPath(AUTH_COOKIE_SECURE)
+ ? configs.getBoolean(AUTH_COOKIE_SECURE)
+ : DEFAULT_AUTH_COOKIE_SECURE;
}
public int getTtlInHours() {
diff --git a/datahub-frontend/app/auth/JAASConfigs.java b/datahub-frontend/app/auth/JAASConfigs.java
index f39c20aceb6f9b..529bf98e1fdcf2 100644
--- a/datahub-frontend/app/auth/JAASConfigs.java
+++ b/datahub-frontend/app/auth/JAASConfigs.java
@@ -6,17 +6,18 @@
*/
public class JAASConfigs {
- public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled";
+ public static final String JAAS_ENABLED_CONFIG_PATH = "auth.jaas.enabled";
- private Boolean _isEnabled = true;
+ private Boolean _isEnabled = true;
- public JAASConfigs(final com.typesafe.config.Config configs) {
- if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH) && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) {
- _isEnabled = false;
- }
+ public JAASConfigs(final com.typesafe.config.Config configs) {
+ if (configs.hasPath(JAAS_ENABLED_CONFIG_PATH)
+ && !configs.getBoolean(JAAS_ENABLED_CONFIG_PATH)) {
+ _isEnabled = false;
}
+ }
- public boolean isJAASEnabled() {
- return _isEnabled;
- }
+ public boolean isJAASEnabled() {
+ return _isEnabled;
+ }
}
diff --git a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
index 3114da92d7d79a..772c2c8f92f28c 100644
--- a/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
+++ b/datahub-frontend/app/auth/NativeAuthenticationConfigs.java
@@ -1,23 +1,27 @@
package auth;
-/**
- * Currently, this config enables or disable native user authentication.
- */
+/** Currently, this config enables or disables native user authentication. */
public class NativeAuthenticationConfigs {
public static final String NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH = "auth.native.enabled";
- public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH = "auth.native.signUp.enforceValidEmail";
+ public static final String NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH =
+ "auth.native.signUp.enforceValidEmail";
private Boolean _isEnabled = true;
private Boolean _isEnforceValidEmailEnabled = true;
public NativeAuthenticationConfigs(final com.typesafe.config.Config configs) {
if (configs.hasPath(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH)) {
- _isEnabled = Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString());
+ _isEnabled =
+ Boolean.parseBoolean(
+ configs.getValue(NATIVE_AUTHENTICATION_ENABLED_CONFIG_PATH).toString());
}
if (configs.hasPath(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)) {
_isEnforceValidEmailEnabled =
- Boolean.parseBoolean(configs.getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH).toString());
+ Boolean.parseBoolean(
+ configs
+ .getValue(NATIVE_AUTHENTICATION_ENFORCE_VALID_EMAIL_ENABLED_CONFIG_PATH)
+ .toString());
}
}
diff --git a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
index a6dbd69a938893..223ac669bd6eae 100644
--- a/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
+++ b/datahub-frontend/app/auth/cookie/CustomCookiesModule.java
@@ -7,16 +7,15 @@
import play.api.mvc.FlashCookieBaker;
import play.api.mvc.SessionCookieBaker;
-
public class CustomCookiesModule extends AbstractModule {
@Override
public void configure() {
bind(CookieSigner.class).toProvider(CookieSignerProvider.class);
- // We override the session cookie baker to not use a fallback, this prevents using an old URL Encoded cookie
+ // We override the session cookie baker to not use a fallback; this prevents using an old
+ // URL-encoded cookie
bind(SessionCookieBaker.class).to(CustomSessionCookieBaker.class);
// We don't care about flash cookies, we don't use them
bind(FlashCookieBaker.class).to(DefaultFlashCookieBaker.class);
}
-
}
diff --git a/datahub-frontend/app/auth/sso/SsoConfigs.java b/datahub-frontend/app/auth/sso/SsoConfigs.java
index 062054173bddb7..1f8455e773ffb1 100644
--- a/datahub-frontend/app/auth/sso/SsoConfigs.java
+++ b/datahub-frontend/app/auth/sso/SsoConfigs.java
@@ -2,24 +2,19 @@
import static auth.ConfigUtil.*;
-
-/**
- * Class responsible for extracting and validating top-level SSO related configurations.
- */
+/** Class responsible for extracting and validating top-level SSO related configurations. */
public class SsoConfigs {
- /**
- * Required configs
- */
+ /** Required configs */
private static final String AUTH_BASE_URL_CONFIG_PATH = "auth.baseUrl";
+
private static final String AUTH_BASE_CALLBACK_PATH_CONFIG_PATH = "auth.baseCallbackPath";
private static final String AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH = "auth.successRedirectPath";
public static final String OIDC_ENABLED_CONFIG_PATH = "auth.oidc.enabled";
- /**
- * Default values
- */
+ /** Default values */
private static final String DEFAULT_BASE_CALLBACK_PATH = "/callback";
+
private static final String DEFAULT_SUCCESS_REDIRECT_PATH = "/";
private final String _authBaseUrl;
@@ -29,17 +24,14 @@ public class SsoConfigs {
public SsoConfigs(final com.typesafe.config.Config configs) {
_authBaseUrl = getRequired(configs, AUTH_BASE_URL_CONFIG_PATH);
- _authBaseCallbackPath = getOptional(
- configs,
- AUTH_BASE_CALLBACK_PATH_CONFIG_PATH,
- DEFAULT_BASE_CALLBACK_PATH);
- _authSuccessRedirectPath = getOptional(
- configs,
- AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH,
- DEFAULT_SUCCESS_REDIRECT_PATH);
- _oidcEnabled = configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
- && Boolean.TRUE.equals(
- Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
+ _authBaseCallbackPath =
+ getOptional(configs, AUTH_BASE_CALLBACK_PATH_CONFIG_PATH, DEFAULT_BASE_CALLBACK_PATH);
+ _authSuccessRedirectPath =
+ getOptional(configs, AUTH_SUCCESS_REDIRECT_PATH_CONFIG_PATH, DEFAULT_SUCCESS_REDIRECT_PATH);
+ _oidcEnabled =
+ configs.hasPath(OIDC_ENABLED_CONFIG_PATH)
+ && Boolean.TRUE.equals(
+ Boolean.parseBoolean(configs.getString(OIDC_ENABLED_CONFIG_PATH)));
}
public String getAuthBaseUrl() {
diff --git a/datahub-frontend/app/auth/sso/SsoManager.java b/datahub-frontend/app/auth/sso/SsoManager.java
index 739ce3f1ba4508..bf33f4148a5531 100644
--- a/datahub-frontend/app/auth/sso/SsoManager.java
+++ b/datahub-frontend/app/auth/sso/SsoManager.java
@@ -2,19 +2,16 @@
import javax.annotation.Nonnull;
-
-/**
- * Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists.
- */
+/** Singleton class that stores & serves reference to a single {@link SsoProvider} if one exists. */
public class SsoManager {
private SsoProvider<?> _provider; // Only one active provider at a time.
- public SsoManager() { }
+ public SsoManager() {}
/**
- * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been
- * provided to the manager.
+ * Returns true if SSO is enabled, meaning a non-null {@link SsoProvider} has been provided to the
+ * manager.
*
* @return true if SSO logic is enabled, false otherwise.
*/
@@ -34,8 +31,8 @@ public void setSsoProvider(@Nonnull final SsoProvider<?> provider) {
/**
* Gets the active {@link SsoProvider} instance.
*
- * @return the {@SsoProvider} that should be used during authentication and on
- * IdP callback, or null if SSO is not enabled.
+ * @return the {@link SsoProvider} that should be used during authentication and on IdP callback, or
+ * null if SSO is not enabled.
*/
public SsoProvider<?> getSsoProvider() {
return _provider;
diff --git a/datahub-frontend/app/auth/sso/SsoProvider.java b/datahub-frontend/app/auth/sso/SsoProvider.java
index f7454d599ba995..a0947b52b92ae6 100644
--- a/datahub-frontend/app/auth/sso/SsoProvider.java
+++ b/datahub-frontend/app/auth/sso/SsoProvider.java
@@ -3,15 +3,10 @@
import org.pac4j.core.client.Client;
import org.pac4j.core.credentials.Credentials;
-/**
- * A thin interface over a Pac4j {@link Client} object and its
- * associated configurations.
- */
+/** A thin interface over a Pac4j {@link Client} object and its associated configurations. */
public interface SsoProvider<C extends SsoConfigs> {
- /**
- * The protocol used for SSO.
- */
+ /** The protocol used for SSO. */
enum SsoProtocol {
OIDC("oidc");
// SAML -- not yet supported.
@@ -28,19 +23,12 @@ public String getCommonName() {
}
}
- /**
- * Returns the configs required by the provider.
- */
+ /** Returns the configs required by the provider. */
C configs();
- /**
- * Returns the SSO protocol associated with the provider instance.
- */
+ /** Returns the SSO protocol associated with the provider instance. */
SsoProtocol protocol();
- /**
- * Retrieves an initialized Pac4j {@link Client}.
- */
+ /** Retrieves an initialized Pac4j {@link Client}. */
+ Client<? extends Credentials> client();
-
}
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
index baca144610ec4c..fa676d2d16c904 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcAuthorizationGenerator.java
@@ -1,9 +1,9 @@
package auth.sso.oidc;
+import com.nimbusds.jwt.JWT;
+import com.nimbusds.jwt.JWTParser;
import java.util.Map.Entry;
import java.util.Optional;
-
-import com.nimbusds.jwt.JWTParser;
import org.pac4j.core.authorization.generator.AuthorizationGenerator;
import org.pac4j.core.context.WebContext;
import org.pac4j.core.profile.AttributeLocation;
@@ -14,44 +14,43 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import com.nimbusds.jwt.JWT;
-
public class OidcAuthorizationGenerator implements AuthorizationGenerator {
- private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class);
-
- private final ProfileDefinition<?> profileDef;
+ private static final Logger logger = LoggerFactory.getLogger(OidcAuthorizationGenerator.class);
- private final OidcConfigs oidcConfigs;
+ private final ProfileDefinition<?> profileDef;
- public OidcAuthorizationGenerator(final ProfileDefinition<?> profileDef, final OidcConfigs oidcConfigs) {
- this.profileDef = profileDef;
- this.oidcConfigs = oidcConfigs;
- }
+ private final OidcConfigs oidcConfigs;
- @Override
- public Optional<UserProfile> generate(WebContext context, UserProfile profile) {
- if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) {
- try {
- final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue());
-
- CommonProfile commonProfile = new CommonProfile();
-
- for (final Entry<String, Object> entry : jwt.getJWTClaimsSet().getClaims().entrySet()) {
- final String claimName = entry.getKey();
-
- if (profile.getAttribute(claimName) == null) {
- profileDef.convertAndAdd(commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue());
- }
- }
-
- return Optional.of(commonProfile);
- } catch (Exception e) {
- logger.warn("Cannot parse access token claims", e);
- }
+ public OidcAuthorizationGenerator(
+ final ProfileDefinition<?> profileDef, final OidcConfigs oidcConfigs) {
+ this.profileDef = profileDef;
+ this.oidcConfigs = oidcConfigs;
+ }
+
+ @Override
+ public Optional<UserProfile> generate(WebContext context, UserProfile profile) {
+ if (oidcConfigs.getExtractJwtAccessTokenClaims().orElse(false)) {
+ try {
+ final JWT jwt = JWTParser.parse(((OidcProfile) profile).getAccessToken().getValue());
+
+ CommonProfile commonProfile = new CommonProfile();
+
+ for (final Entry<String, Object> entry : jwt.getJWTClaimsSet().getClaims().entrySet()) {
+ final String claimName = entry.getKey();
+
+ if (profile.getAttribute(claimName) == null) {
+ profileDef.convertAndAdd(
+ commonProfile, AttributeLocation.PROFILE_ATTRIBUTE, claimName, entry.getValue());
+ }
}
-
- return Optional.ofNullable(profile);
+
+ return Optional.of(commonProfile);
+ } catch (Exception e) {
+ logger.warn("Cannot parse access token claims", e);
+ }
}
-
+
+ return Optional.ofNullable(profile);
+ }
}
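For context, a brief sketch (assuming the nimbus-jose-jwt dependency already imported above) of the claim extraction that generate() performs: parse the raw access token and walk its claim set. The token value below is a placeholder.

import com.nimbusds.jwt.JWT;
import com.nimbusds.jwt.JWTParser;
import java.util.Map;

public class JwtClaimsSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder value; supply a real signed access token to run this end to end.
    String rawAccessToken = "<jwt-access-token>";

    // Same calls OidcAuthorizationGenerator.generate() relies on.
    JWT jwt = JWTParser.parse(rawAccessToken);
    for (Map.Entry<String, Object> claim : jwt.getJWTClaimsSet().getClaims().entrySet()) {
      System.out.println(claim.getKey() + " -> " + claim.getValue());
    }
  }
}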
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
index 7164710f4e0ded..fa562f54312eca 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcCallbackLogic.java
@@ -1,6 +1,13 @@
package auth.sso.oidc;
+import static auth.AuthUtils.*;
+import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
+import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME;
+import static org.pac4j.play.store.PlayCookieSessionStore.*;
+import static play.mvc.Results.internalServerError;
+
import auth.CookieConfigs;
+import auth.sso.SsoManager;
import client.AuthServiceClient;
import com.datahub.authentication.Authentication;
import com.linkedin.common.AuditStamp;
@@ -59,23 +66,16 @@
import org.pac4j.core.util.Pac4jConstants;
import org.pac4j.play.PlayWebContext;
import play.mvc.Result;
-import auth.sso.SsoManager;
-
-import static auth.AuthUtils.*;
-import static com.linkedin.metadata.Constants.CORP_USER_ENTITY_NAME;
-import static com.linkedin.metadata.Constants.GROUP_MEMBERSHIP_ASPECT_NAME;
-import static org.pac4j.play.store.PlayCookieSessionStore.*;
-import static play.mvc.Results.internalServerError;
-
/**
- * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects back to D
- * DataHub after an authentication attempt.
+ * This class contains the logic that is executed when an OpenID Connect Identity Provider redirects
+ * back to DataHub after an authentication attempt.
*
- * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract
- * basic information about the user including their name, email, groups, & more. If just-in-time provisioning
- * is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for the user, along with any Groups
- * ({@link CorpGroupSnapshot}) that can be extracted, only doing so if the user does not already exist.
+ * On receiving a user profile from the IdP (using /userInfo endpoint), we attempt to extract
+ * basic information about the user including their name, email, groups, & more. If just-in-time
+ * provisioning is enabled, we also attempt to create a DataHub User ({@link CorpUserSnapshot}) for
+ * the user, along with any Groups ({@link CorpGroupSnapshot}) that can be extracted, only doing so
+ * if the user does not already exist.
*/
@Slf4j
public class OidcCallbackLogic extends DefaultCallbackLogic<Result, PlayWebContext> {
@@ -86,9 +86,12 @@ public class OidcCallbackLogic extends DefaultCallbackLogic<Result, PlayWebContext> {
- public Result perform(PlayWebContext context, Config config,
- HttpActionAdapter<Result, PlayWebContext> httpActionAdapter, String defaultUrl, Boolean saveInSession,
- Boolean multiProfile, Boolean renewSession, String defaultClient) {
+ public Result perform(
+ PlayWebContext context,
+ Config config,
+ HttpActionAdapter<Result, PlayWebContext> httpActionAdapter,
+ String defaultUrl,
+ Boolean saveInSession,
+ Boolean multiProfile,
+ Boolean renewSession,
+ String defaultClient) {
setContextRedirectUrl(context);
final Result result =
- super.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession,
+ super.perform(
+ context,
+ config,
+ httpActionAdapter,
+ defaultUrl,
+ saveInSession,
+ multiProfile,
+ renewSession,
defaultClient);
// Handle OIDC authentication errors.
@@ -119,14 +135,25 @@ public Result perform(PlayWebContext context, Config config,
@SuppressWarnings("unchecked")
private void setContextRedirectUrl(PlayWebContext context) {
- Optional<Cookie> redirectUrl = context.getRequestCookies().stream()
- .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName())).findFirst();
+ Optional<Cookie> redirectUrl =
+ context.getRequestCookies().stream()
+ .filter(cookie -> REDIRECT_URL_COOKIE_NAME.equals(cookie.getName()))
+ .findFirst();
redirectUrl.ifPresent(
- cookie -> context.getSessionStore().set(context, Pac4jConstants.REQUESTED_URL,
- JAVA_SER_HELPER.deserializeFromBytes(uncompressBytes(Base64.getDecoder().decode(cookie.getValue())))));
+ cookie ->
+ context
+ .getSessionStore()
+ .set(
+ context,
+ Pac4jConstants.REQUESTED_URL,
+ JAVA_SER_HELPER.deserializeFromBytes(
+ uncompressBytes(Base64.getDecoder().decode(cookie.getValue())))));
}
- private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result result, final PlayWebContext context,
+ private Result handleOidcCallback(
+ final OidcConfigs oidcConfigs,
+ final Result result,
+ final PlayWebContext context,
+ final ProfileManager<UserProfile> profileManager) {
log.debug("Beginning OIDC Callback Handling...");
@@ -134,14 +161,17 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re
if (profileManager.isAuthenticated()) {
// If authenticated, the user should have a profile.
final CommonProfile profile = (CommonProfile) profileManager.get(true).get();
- log.debug(String.format("Found authenticated user with profile %s", profile.getAttributes().toString()));
+ log.debug(
+ String.format(
+ "Found authenticated user with profile %s", profile.getAttributes().toString()));
// Extract the User name required to log into DataHub.
final String userName = extractUserNameOrThrow(oidcConfigs, profile);
final CorpuserUrn corpUserUrn = new CorpuserUrn(userName);
try {
- // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does not exist.
+ // If just-in-time User Provisioning is enabled, try to create the DataHub user if it does
+ // not exist.
if (oidcConfigs.isJitProvisioningEnabled()) {
log.debug("Just-in-time provisioning is enabled. Beginning provisioning process...");
CorpUserSnapshot extractedUser = extractUser(corpUserUrn, profile);
@@ -150,7 +180,8 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re
// Extract groups & provision them.
List<CorpGroupSnapshot> extractedGroups = extractGroups(profile);
tryProvisionGroups(extractedGroups);
- // Add users to groups on DataHub. Note that this clears existing group membership for a user if it already exists.
+ // Add users to groups on DataHub. Note that this clears existing group membership for a
+ // user if it already exists.
updateGroupMembership(corpUserUrn, createGroupMembership(extractedGroups));
}
} else if (oidcConfigs.isPreProvisioningRequired()) {
@@ -160,55 +191,69 @@ private Result handleOidcCallback(final OidcConfigs oidcConfigs, final Result re
}
// Update user status to active on login.
// If we want to prevent certain users from logging in, here's where we'll want to do it.
- setUserStatus(corpUserUrn, new CorpUserStatus().setStatus(Constants.CORP_USER_STATUS_ACTIVE)
- .setLastModified(new AuditStamp().setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
- .setTime(System.currentTimeMillis())));
+ setUserStatus(
+ corpUserUrn,
+ new CorpUserStatus()
+ .setStatus(Constants.CORP_USER_STATUS_ACTIVE)
+ .setLastModified(
+ new AuditStamp()
+ .setActor(Urn.createFromString(Constants.SYSTEM_ACTOR))
+ .setTime(System.currentTimeMillis())));
} catch (Exception e) {
log.error("Failed to perform post authentication steps. Redirecting to error page.", e);
return internalServerError(
- String.format("Failed to perform post authentication steps. Error message: %s", e.getMessage()));
+ String.format(
+ "Failed to perform post authentication steps. Error message: %s", e.getMessage()));
}
// Successfully logged in - Generate GMS login token
final String accessToken = _authClient.generateSessionTokenForUser(corpUserUrn.getId());
return result
- .withSession(createSessionMap(corpUserUrn.toString(), accessToken))
- .withCookies(
- createActorCookie(
- corpUserUrn.toString(),
- _cookieConfigs.getTtlInHours(),
- _cookieConfigs.getAuthCookieSameSite(),
- _cookieConfigs.getAuthCookieSecure()
- )
- );
+ .withSession(createSessionMap(corpUserUrn.toString(), accessToken))
+ .withCookies(
+ createActorCookie(
+ corpUserUrn.toString(),
+ _cookieConfigs.getTtlInHours(),
+ _cookieConfigs.getAuthCookieSameSite(),
+ _cookieConfigs.getAuthCookieSecure()));
}
return internalServerError(
"Failed to authenticate current user. Cannot find valid identity provider profile in session.");
}
- private String extractUserNameOrThrow(final OidcConfigs oidcConfigs, final CommonProfile profile) {
+ private String extractUserNameOrThrow(
+ final OidcConfigs oidcConfigs, final CommonProfile profile) {
// Ensure that the attribute exists (was returned by IdP)
if (!profile.containsAttribute(oidcConfigs.getUserNameClaim())) {
- throw new RuntimeException(String.format(
- "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s",
- oidcConfigs.getUserNameClaim(), oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString()));
+ throw new RuntimeException(
+ String.format(
+ "Failed to resolve user name claim from profile provided by Identity Provider. Missing attribute. Attribute: '%s', Regex: '%s', Profile: %s",
+ oidcConfigs.getUserNameClaim(),
+ oidcConfigs.getUserNameClaimRegex(),
+ profile.getAttributes().toString()));
}
final String userNameClaim = (String) profile.getAttribute(oidcConfigs.getUserNameClaim());
- final Optional<String> mappedUserName = extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim);
-
- return mappedUserName.orElseThrow(() -> new RuntimeException(
- String.format("Failed to extract DataHub username from username claim %s using regex %s. Profile: %s",
- userNameClaim, oidcConfigs.getUserNameClaimRegex(), profile.getAttributes().toString())));
+ final Optional<String> mappedUserName =
+ extractRegexGroup(oidcConfigs.getUserNameClaimRegex(), userNameClaim);
+
+ return mappedUserName.orElseThrow(
+ () ->
+ new RuntimeException(
+ String.format(
+ "Failed to extract DataHub username from username claim %s using regex %s. Profile: %s",
+ userNameClaim,
+ oidcConfigs.getUserNameClaimRegex(),
+ profile.getAttributes().toString())));
}
- /**
- * Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}.
- */
+ /** Attempts to map to an OIDC {@link CommonProfile} (userInfo) to a {@link CorpUserSnapshot}. */
private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
- log.debug(String.format("Attempting to extract user from OIDC profile %s", profile.getAttributes().toString()));
+ log.debug(
+ String.format(
+ "Attempting to extract user from OIDC profile %s", profile.getAttributes().toString()));
// Extracts these based on the default set of OIDC claims, described here:
// https://developer.okta.com/blog/2017/07/25/oidc-primer-part-1
@@ -217,7 +262,9 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
String email = profile.getEmail();
URI picture = profile.getPictureUrl();
String displayName = profile.getDisplayName();
- String fullName = (String) profile.getAttribute("name"); // Name claim is sometimes provided, including by Google.
+ String fullName =
+ (String)
+ profile.getAttribute("name"); // Name claim is sometimes provided, including by Google.
if (fullName == null && firstName != null && lastName != null) {
fullName = String.format("%s %s", firstName, lastName);
}
@@ -231,7 +278,8 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
userInfo.setFullName(fullName, SetMode.IGNORE_NULL);
userInfo.setEmail(email, SetMode.IGNORE_NULL);
// If there is a display name, use it. Otherwise fall back to full name.
- userInfo.setDisplayName(displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL);
+ userInfo.setDisplayName(
+ displayName == null ? userInfo.getFullName() : displayName, SetMode.IGNORE_NULL);
final CorpUserEditableInfo editableInfo = new CorpUserEditableInfo();
try {
@@ -254,15 +302,18 @@ private CorpUserSnapshot extractUser(CorpuserUrn urn, CommonProfile profile) {
private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
- log.debug(String.format("Attempting to extract groups from OIDC profile %s", profile.getAttributes().toString()));
+ log.debug(
+ String.format(
+ "Attempting to extract groups from OIDC profile %s",
+ profile.getAttributes().toString()));
final OidcConfigs configs = (OidcConfigs) _ssoManager.getSsoProvider().configs();
- // First, attempt to extract a list of groups from the profile, using the group name attribute config.
+ // First, attempt to extract a list of groups from the profile, using the group name attribute
+ // config.
final List<CorpGroupSnapshot> extractedGroups = new ArrayList<>();
final List<String> groupsClaimNames =
- new ArrayList<String>(Arrays.asList(configs.getGroupsClaimName().split(","))).stream()
- .map(String::trim)
- .collect(Collectors.toList());
+ new ArrayList<String>(Arrays.asList(configs.getGroupsClaimName().split(",")))
+ .stream().map(String::trim).collect(Collectors.toList());
for (final String groupsClaimName : groupsClaimNames) {
@@ -273,14 +324,16 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
final Object groupAttribute = profile.getAttribute(groupsClaimName);
if (groupAttribute instanceof Collection) {
// List of group names
- groupNames = (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class);
+ groupNames =
+ (Collection<String>) profile.getAttribute(groupsClaimName, Collection.class);
} else if (groupAttribute instanceof String) {
// Single group name
groupNames = Collections.singleton(profile.getAttribute(groupsClaimName, String.class));
} else {
log.error(
- String.format("Fail to parse OIDC group claim with name %s. Unknown type %s provided.", groupsClaimName,
- groupAttribute.getClass()));
+ String.format(
+ "Fail to parse OIDC group claim with name %s. Unknown type %s provided.",
+ groupsClaimName, groupAttribute.getClass()));
// Skip over group attribute. Do not throw.
groupNames = Collections.emptyList();
}
@@ -297,7 +350,8 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
corpGroupInfo.setDisplayName(groupName);
// To deal with the possibility of spaces, we url encode the URN group name.
- final String urlEncodedGroupName = URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString());
+ final String urlEncodedGroupName =
+ URLEncoder.encode(groupName, StandardCharsets.UTF_8.toString());
final CorpGroupUrn groupUrn = new CorpGroupUrn(urlEncodedGroupName);
final CorpGroupSnapshot corpGroupSnapshot = new CorpGroupSnapshot();
corpGroupSnapshot.setUrn(groupUrn);
@@ -306,18 +360,23 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
corpGroupSnapshot.setAspects(aspects);
groupSnapshots.add(corpGroupSnapshot);
} catch (UnsupportedEncodingException ex) {
- log.error(String.format("Failed to URL encoded extracted group name %s. Skipping", groupName));
+ log.error(
+ String.format(
+ "Failed to URL encoded extracted group name %s. Skipping", groupName));
}
}
if (groupSnapshots.isEmpty()) {
- log.warn(String.format("Failed to extract groups: No OIDC claim with name %s found", groupsClaimName));
+ log.warn(
+ String.format(
+ "Failed to extract groups: No OIDC claim with name %s found", groupsClaimName));
} else {
extractedGroups.addAll(groupSnapshots);
}
} catch (Exception e) {
- log.error(String.format(
- "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s",
- groupsClaimName, profile.getAttribute(groupsClaimName).getClass()));
+ log.error(
+ String.format(
+ "Failed to extract groups: Expected to find a list of strings for attribute with name %s, found %s",
+ groupsClaimName, profile.getAttribute(groupsClaimName).getClass()));
}
}
}
@@ -327,7 +386,8 @@ private List<CorpGroupSnapshot> extractGroups(CommonProfile profile) {
private GroupMembership createGroupMembership(final List<CorpGroupSnapshot> extractedGroups) {
final GroupMembership groupMembershipAspect = new GroupMembership();
groupMembershipAspect.setGroups(
- new UrnArray(extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())));
+ new UrnArray(
+ extractedGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())));
return groupMembershipAspect;
}
@@ -345,30 +405,39 @@ private void tryProvisionUser(CorpUserSnapshot corpUserSnapshot) {
// If we find more than the key aspect, then the entity "exists".
if (existingCorpUserSnapshot.getAspects().size() <= 1) {
log.debug(
- String.format("Extracted user that does not yet exist %s. Provisioning...", corpUserSnapshot.getUrn()));
+ String.format(
+ "Extracted user that does not yet exist %s. Provisioning...",
+ corpUserSnapshot.getUrn()));
// 2. The user does not exist. Provision them.
final Entity newEntity = new Entity();
newEntity.setValue(Snapshot.create(corpUserSnapshot));
_entityClient.update(newEntity, _systemAuthentication);
log.debug(String.format("Successfully provisioned user %s", corpUserSnapshot.getUrn()));
}
- log.debug(String.format("User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn()));
+ log.debug(
+ String.format(
+ "User %s already exists. Skipping provisioning", corpUserSnapshot.getUrn()));
// Otherwise, the user exists. Skip provisioning.
} catch (RemoteInvocationException e) {
// Failing provisioning is something worth throwing about.
- throw new RuntimeException(String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e);
+ throw new RuntimeException(
+ String.format("Failed to provision user with urn %s.", corpUserSnapshot.getUrn()), e);
}
}
private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) {
- log.debug(String.format("Attempting to provision groups with urns %s",
- corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())));
+ log.debug(
+ String.format(
+ "Attempting to provision groups with urns %s",
+ corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())));
// 1. Check if this user already exists.
try {
- final Set<Urn> urnsToFetch = corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet());
- final Map<Urn, Entity> existingGroups = _entityClient.batchGet(urnsToFetch, _systemAuthentication);
+ final Set<Urn> urnsToFetch =
+ corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toSet());
+ final Map<Urn, Entity> existingGroups =
+ _entityClient.batchGet(urnsToFetch, _systemAuthentication);
log.debug(String.format("Fetched GMS groups with urns %s", existingGroups.keySet()));
@@ -381,15 +450,21 @@ private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) {
// If more than the key aspect exists, then the group already "exists".
if (corpGroupSnapshot.getAspects().size() <= 1) {
- log.debug(String.format("Extracted group that does not yet exist %s. Provisioning...",
- corpGroupSnapshot.getUrn()));
+ log.debug(
+ String.format(
+ "Extracted group that does not yet exist %s. Provisioning...",
+ corpGroupSnapshot.getUrn()));
groupsToCreate.add(extractedGroup);
}
- log.debug(String.format("Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn()));
+ log.debug(
+ String.format(
+ "Group %s already exists. Skipping provisioning", corpGroupSnapshot.getUrn()));
} else {
// Should not occur until we stop returning default Key aspects for unrecognized entities.
log.debug(
- String.format("Extracted group that does not yet exist %s. Provisioning...", extractedGroup.getUrn()));
+ String.format(
+ "Extracted group that does not yet exist %s. Provisioning...",
+ extractedGroup.getUrn()));
groupsToCreate.add(extractedGroup);
}
}
@@ -400,15 +475,20 @@ private void tryProvisionGroups(List<CorpGroupSnapshot> corpGroups) {
log.debug(String.format("Provisioning groups with urns %s", groupsToCreateUrns));
// Now batch create all entities identified to create.
- _entityClient.batchUpdate(groupsToCreate.stream()
- .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot)))
- .collect(Collectors.toSet()), _systemAuthentication);
+ _entityClient.batchUpdate(
+ groupsToCreate.stream()
+ .map(groupSnapshot -> new Entity().setValue(Snapshot.create(groupSnapshot)))
+ .collect(Collectors.toSet()),
+ _systemAuthentication);
log.debug(String.format("Successfully provisioned groups with urns %s", groupsToCreateUrns));
} catch (RemoteInvocationException e) {
// Failing provisioning is something worth throwing about.
- throw new RuntimeException(String.format("Failed to provision groups with urns %s.",
- corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())), e);
+ throw new RuntimeException(
+ String.format(
+ "Failed to provision groups with urns %s.",
+ corpGroups.stream().map(CorpGroupSnapshot::getUrn).collect(Collectors.toList())),
+ e);
}
}
@@ -423,12 +503,14 @@ private void updateGroupMembership(Urn urn, GroupMembership groupMembership) {
try {
_entityClient.ingestProposal(proposal, _systemAuthentication);
} catch (RemoteInvocationException e) {
- throw new RuntimeException(String.format("Failed to update group membership for user with urn %s", urn), e);
+ throw new RuntimeException(
+ String.format("Failed to update group membership for user with urn %s", urn), e);
}
}
private void verifyPreProvisionedUser(CorpuserUrn urn) {
- // Validate that the user exists in the system (there is more than just a key aspect for them, as of today).
+ // Validate that the user exists in the system (there is more than just a key aspect for them,
+ // as of today).
try {
final Entity corpUser = _entityClient.get(urn, _systemAuthentication);
@@ -436,9 +518,14 @@ private void verifyPreProvisionedUser(CorpuserUrn urn) {
// If we find more than the key aspect, then the entity "exists".
if (corpUser.getValue().getCorpUserSnapshot().getAspects().size() <= 1) {
- log.debug(String.format("Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn));
- throw new RuntimeException(String.format("User with urn %s has not yet been provisioned in DataHub. "
- + "Please contact your DataHub admin to provision an account.", urn));
+ log.debug(
+ String.format(
+ "Found user that does not yet exist %s. Invalid login attempt. Throwing...", urn));
+ throw new RuntimeException(
+ String.format(
+ "User with urn %s has not yet been provisioned in DataHub. "
+ + "Please contact your DataHub admin to provision an account.",
+ urn));
}
// Otherwise, the user exists.
} catch (RemoteInvocationException e) {
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java
index eb037db2ef9c01..6877ca187da973 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcConfigs.java
@@ -1,104 +1,122 @@
package auth.sso.oidc;
+import static auth.ConfigUtil.*;
+
import auth.sso.SsoConfigs;
import java.util.Optional;
import lombok.Getter;
-import static auth.ConfigUtil.*;
-
-
-/**
- * Class responsible for extracting and validating OIDC related configurations.
- */
+/** Class responsible for extracting and validating OIDC related configurations. */
@Getter
public class OidcConfigs extends SsoConfigs {
- /**
- * Required configs
- */
- public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId";
- public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret";
- public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri";
+ /** Required configs */
+ public static final String OIDC_CLIENT_ID_CONFIG_PATH = "auth.oidc.clientId";
+
+ public static final String OIDC_CLIENT_SECRET_CONFIG_PATH = "auth.oidc.clientSecret";
+ public static final String OIDC_DISCOVERY_URI_CONFIG_PATH = "auth.oidc.discoveryUri";
+
+ /** Optional configs */
+ public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim";
+
+ public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex";
+ public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope";
+ public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName";
+ public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH =
+ "auth.oidc.clientAuthenticationMethod";
+ public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH =
+ "auth.oidc.jitProvisioningEnabled";
+ public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH =
+ "auth.oidc.preProvisioningRequired";
+ public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled";
+ public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH =
+ "auth.oidc.groupsClaim"; // Claim expected to be an array of group names.
+ public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType";
+ public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode";
+ public static final String OIDC_USE_NONCE = "auth.oidc.useNonce";
+ public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource";
+ public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout";
+ public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS =
+ "auth.oidc.extractJwtAccessTokenClaims";
+ public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm";
- /**
- * Optional configs
- */
- public static final String OIDC_USERNAME_CLAIM_CONFIG_PATH = "auth.oidc.userNameClaim";
- public static final String OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH = "auth.oidc.userNameClaimRegex";
- public static final String OIDC_SCOPE_CONFIG_PATH = "auth.oidc.scope";
- public static final String OIDC_CLIENT_NAME_CONFIG_PATH = "auth.oidc.clientName";
- public static final String OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH = "auth.oidc.clientAuthenticationMethod";
- public static final String OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH = "auth.oidc.jitProvisioningEnabled";
- public static final String OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH = "auth.oidc.preProvisioningRequired";
- public static final String OIDC_EXTRACT_GROUPS_ENABLED = "auth.oidc.extractGroupsEnabled";
- public static final String OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH = "auth.oidc.groupsClaim"; // Claim expected to be an array of group names.
- public static final String OIDC_RESPONSE_TYPE = "auth.oidc.responseType";
- public static final String OIDC_RESPONSE_MODE = "auth.oidc.responseMode";
- public static final String OIDC_USE_NONCE = "auth.oidc.useNonce";
- public static final String OIDC_CUSTOM_PARAM_RESOURCE = "auth.oidc.customParam.resource";
- public static final String OIDC_READ_TIMEOUT = "auth.oidc.readTimeout";
- public static final String OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS = "auth.oidc.extractJwtAccessTokenClaims";
- public static final String OIDC_PREFERRED_JWS_ALGORITHM = "auth.oidc.preferredJwsAlgorithm";
+ /** Default values */
+ private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email";
- /**
- * Default values
- */
- private static final String DEFAULT_OIDC_USERNAME_CLAIM = "email";
- private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)";
- private static final String DEFAULT_OIDC_SCOPE = "openid profile email"; // Often "group" must be included for groups.
- private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc";
- private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic";
- private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true";
- private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false";
- private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED = "false"; // False since extraction of groups can overwrite existing group membership.
- private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups";
- private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000";
+ private static final String DEFAULT_OIDC_USERNAME_CLAIM_REGEX = "(.*)";
+ private static final String DEFAULT_OIDC_SCOPE =
+ "openid profile email"; // Often "group" must be included for groups.
+ private static final String DEFAULT_OIDC_CLIENT_NAME = "oidc";
+ private static final String DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD = "client_secret_basic";
+ private static final String DEFAULT_OIDC_JIT_PROVISIONING_ENABLED = "true";
+ private static final String DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED = "false";
+ private static final String DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED =
+ "false"; // False since extraction of groups can overwrite existing group membership.
+ private static final String DEFAULT_OIDC_GROUPS_CLAIM = "groups";
+ private static final String DEFAULT_OIDC_READ_TIMEOUT = "5000";
- private String clientId;
- private String clientSecret;
- private String discoveryUri;
- private String userNameClaim;
- private String userNameClaimRegex;
- private String scope;
- private String clientName;
- private String clientAuthenticationMethod;
- private boolean jitProvisioningEnabled;
- private boolean preProvisioningRequired;
- private boolean extractGroupsEnabled;
- private String groupsClaimName;
- private Optional<String> responseType;
- private Optional<String> responseMode;
- private Optional<Boolean> useNonce;
- private Optional<String> customParamResource;
- private String readTimeout;
- private Optional<Boolean> extractJwtAccessTokenClaims;
- private Optional<String> preferredJwsAlgorithm;
+ private String clientId;
+ private String clientSecret;
+ private String discoveryUri;
+ private String userNameClaim;
+ private String userNameClaimRegex;
+ private String scope;
+ private String clientName;
+ private String clientAuthenticationMethod;
+ private boolean jitProvisioningEnabled;
+ private boolean preProvisioningRequired;
+ private boolean extractGroupsEnabled;
+ private String groupsClaimName;
+ private Optional<String> responseType;
+ private Optional<String> responseMode;
+ private Optional<Boolean> useNonce;
+ private Optional<String> customParamResource;
+ private String readTimeout;
+ private Optional<Boolean> extractJwtAccessTokenClaims;
+ private Optional<String> preferredJwsAlgorithm;
- public OidcConfigs(final com.typesafe.config.Config configs) {
- super(configs);
- clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH);
- clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH);
- discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH);
- userNameClaim = getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM);
- userNameClaimRegex =
- getOptional(configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX);
- scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE);
- clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME);
- clientAuthenticationMethod = getOptional(configs, OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH,
+ public OidcConfigs(final com.typesafe.config.Config configs) {
+ super(configs);
+ clientId = getRequired(configs, OIDC_CLIENT_ID_CONFIG_PATH);
+ clientSecret = getRequired(configs, OIDC_CLIENT_SECRET_CONFIG_PATH);
+ discoveryUri = getRequired(configs, OIDC_DISCOVERY_URI_CONFIG_PATH);
+ userNameClaim =
+ getOptional(configs, OIDC_USERNAME_CLAIM_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM);
+ userNameClaimRegex =
+ getOptional(
+ configs, OIDC_USERNAME_CLAIM_REGEX_CONFIG_PATH, DEFAULT_OIDC_USERNAME_CLAIM_REGEX);
+ scope = getOptional(configs, OIDC_SCOPE_CONFIG_PATH, DEFAULT_OIDC_SCOPE);
+ clientName = getOptional(configs, OIDC_CLIENT_NAME_CONFIG_PATH, DEFAULT_OIDC_CLIENT_NAME);
+ clientAuthenticationMethod =
+ getOptional(
+ configs,
+ OIDC_CLIENT_AUTHENTICATION_METHOD_CONFIG_PATH,
DEFAULT_OIDC_CLIENT_AUTHENTICATION_METHOD);
- jitProvisioningEnabled = Boolean.parseBoolean(
- getOptional(configs, OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH, DEFAULT_OIDC_JIT_PROVISIONING_ENABLED));
- preProvisioningRequired = Boolean.parseBoolean(
- getOptional(configs, OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH, DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED));
- extractGroupsEnabled = Boolean.parseBoolean(
+ jitProvisioningEnabled =
+ Boolean.parseBoolean(
+ getOptional(
+ configs,
+ OIDC_JIT_PROVISIONING_ENABLED_CONFIG_PATH,
+ DEFAULT_OIDC_JIT_PROVISIONING_ENABLED));
+ preProvisioningRequired =
+ Boolean.parseBoolean(
+ getOptional(
+ configs,
+ OIDC_PRE_PROVISIONING_REQUIRED_CONFIG_PATH,
+ DEFAULT_OIDC_PRE_PROVISIONING_REQUIRED));
+ extractGroupsEnabled =
+ Boolean.parseBoolean(
getOptional(configs, OIDC_EXTRACT_GROUPS_ENABLED, DEFAULT_OIDC_EXTRACT_GROUPS_ENABLED));
- groupsClaimName = getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM);
- responseType = getOptional(configs, OIDC_RESPONSE_TYPE);
- responseMode = getOptional(configs, OIDC_RESPONSE_MODE);
- useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean);
- customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE);
- readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT);
- extractJwtAccessTokenClaims = getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean);
- preferredJwsAlgorithm = Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null));
- }
+ groupsClaimName =
+ getOptional(configs, OIDC_GROUPS_CLAIM_CONFIG_PATH_CONFIG_PATH, DEFAULT_OIDC_GROUPS_CLAIM);
+ responseType = getOptional(configs, OIDC_RESPONSE_TYPE);
+ responseMode = getOptional(configs, OIDC_RESPONSE_MODE);
+ useNonce = getOptional(configs, OIDC_USE_NONCE).map(Boolean::parseBoolean);
+ customParamResource = getOptional(configs, OIDC_CUSTOM_PARAM_RESOURCE);
+ readTimeout = getOptional(configs, OIDC_READ_TIMEOUT, DEFAULT_OIDC_READ_TIMEOUT);
+ extractJwtAccessTokenClaims =
+ getOptional(configs, OIDC_EXTRACT_JWT_ACCESS_TOKEN_CLAIMS).map(Boolean::parseBoolean);
+ preferredJwsAlgorithm =
+ Optional.ofNullable(getOptional(configs, OIDC_PREFERRED_JWS_ALGORITHM, null));
+ }
}
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java
index fd0a2e1877154e..39a65a46cbf919 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcProvider.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcProvider.java
@@ -10,15 +10,15 @@
import org.pac4j.oidc.credentials.OidcCredentials;
import org.pac4j.oidc.profile.OidcProfileDefinition;
-
/**
* Implementation of {@link SsoProvider} supporting the OIDC protocol.
*
- * This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC related
- * configuration options, which reside in an instance of {@link OidcConfigs}.
+ * <p>This class is a thin wrapper over a Pac4J {@link Client} object and all DataHub-specific OIDC
+ * related configuration options, which reside in an instance of {@link OidcConfigs}.
*
- * It is responsible for initializing this client from a configuration object ({@link OidcConfigs}. Note that
- * this class is not related to the logic performed when an IdP performs a callback to DataHub.
+ * <p>It is responsible for initializing this client from a configuration object ({@link
+ * OidcConfigs}. Note that this class is not related to the logic performed when an IdP performs a
+ * callback to DataHub.
*/
@Slf4j
public class OidcProvider implements SsoProvider<OidcConfigs> {
@@ -53,7 +53,8 @@ private Client createPac4jClient() {
oidcConfiguration.setClientId(_oidcConfigs.getClientId());
oidcConfiguration.setSecret(_oidcConfigs.getClientSecret());
oidcConfiguration.setDiscoveryURI(_oidcConfigs.getDiscoveryUri());
- oidcConfiguration.setClientAuthenticationMethodAsString(_oidcConfigs.getClientAuthenticationMethod());
+ oidcConfiguration.setClientAuthenticationMethodAsString(
+ _oidcConfigs.getClientAuthenticationMethod());
oidcConfiguration.setScope(_oidcConfigs.getScope());
try {
oidcConfiguration.setReadTimeout(Integer.parseInt(_oidcConfigs.getReadTimeout()));
@@ -63,18 +64,24 @@ private Client createPac4jClient() {
_oidcConfigs.getResponseType().ifPresent(oidcConfiguration::setResponseType);
_oidcConfigs.getResponseMode().ifPresent(oidcConfiguration::setResponseMode);
_oidcConfigs.getUseNonce().ifPresent(oidcConfiguration::setUseNonce);
- _oidcConfigs.getCustomParamResource()
+ _oidcConfigs
+ .getCustomParamResource()
.ifPresent(value -> oidcConfiguration.setCustomParams(ImmutableMap.of("resource", value)));
- _oidcConfigs.getPreferredJwsAlgorithm().ifPresent(preferred -> {
- log.info("Setting preferredJwsAlgorithm: " + preferred);
- oidcConfiguration.setPreferredJwsAlgorithm(preferred);
- });
+ _oidcConfigs
+ .getPreferredJwsAlgorithm()
+ .ifPresent(
+ preferred -> {
+ log.info("Setting preferredJwsAlgorithm: " + preferred);
+ oidcConfiguration.setPreferredJwsAlgorithm(preferred);
+ });
final CustomOidcClient oidcClient = new CustomOidcClient(oidcConfiguration);
oidcClient.setName(OIDC_CLIENT_NAME);
- oidcClient.setCallbackUrl(_oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath());
+ oidcClient.setCallbackUrl(
+ _oidcConfigs.getAuthBaseUrl() + _oidcConfigs.getAuthBaseCallbackPath());
oidcClient.setCallbackUrlResolver(new PathParameterCallbackUrlResolver());
- oidcClient.addAuthorizationGenerator(new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs));
+ oidcClient.addAuthorizationGenerator(
+ new OidcAuthorizationGenerator(new OidcProfileDefinition(), _oidcConfigs));
return oidcClient;
}
}
diff --git a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java
index 014632c17e690f..9881b5e095b781 100644
--- a/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java
+++ b/datahub-frontend/app/auth/sso/oidc/OidcResponseErrorHandler.java
@@ -1,57 +1,58 @@
package auth.sso.oidc;
+import static play.mvc.Results.internalServerError;
+import static play.mvc.Results.unauthorized;
+
+import java.util.Optional;
import org.pac4j.play.PlayWebContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import play.mvc.Result;
-import java.util.Optional;
-
-import static play.mvc.Results.internalServerError;
-import static play.mvc.Results.unauthorized;
-
-
public class OidcResponseErrorHandler {
- private OidcResponseErrorHandler() {
-
- }
-
- private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler");
+ private OidcResponseErrorHandler() {}
- private static final String ERROR_FIELD_NAME = "error";
- private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description";
+ private static final Logger _logger = LoggerFactory.getLogger("OidcResponseErrorHandler");
- public static Result handleError(final PlayWebContext context) {
+ private static final String ERROR_FIELD_NAME = "error";
+ private static final String ERROR_DESCRIPTION_FIELD_NAME = "error_description";
- _logger.warn("OIDC responded with an error: '{}'. Error description: '{}'",
- getError(context),
- getErrorDescription(context));
+ public static Result handleError(final PlayWebContext context) {
- if (getError(context).isPresent() && getError(context).get().equals("access_denied")) {
- return unauthorized(String.format("Access denied. "
- + "The OIDC service responded with 'Access denied'. "
- + "It seems that you don't have access to this application yet. Please apply for access. \n\n"
- + "If you already have been assigned this application, it may be so that your OIDC request is still in action. "
- + "Error details: '%s':'%s'",
- context.getRequestParameter("error"),
- context.getRequestParameter("error_description")));
- }
+ _logger.warn(
+ "OIDC responded with an error: '{}'. Error description: '{}'",
+ getError(context),
+ getErrorDescription(context));
- return internalServerError(
- String.format("Internal server error. The OIDC service responded with an error: '%s'.\n"
- + "Error description: '%s'", getError(context).orElse(""), getErrorDescription(context).orElse("")));
+ if (getError(context).isPresent() && getError(context).get().equals("access_denied")) {
+ return unauthorized(
+ String.format(
+ "Access denied. "
+ + "The OIDC service responded with 'Access denied'. "
+ + "It seems that you don't have access to this application yet. Please apply for access. \n\n"
+ + "If you already have been assigned this application, it may be so that your OIDC request is still in action. "
+ + "Error details: '%s':'%s'",
+ context.getRequestParameter("error"),
+ context.getRequestParameter("error_description")));
}
- public static boolean isError(final PlayWebContext context) {
- return getError(context).isPresent() && !getError(context).get().isEmpty();
- }
+ return internalServerError(
+ String.format(
+ "Internal server error. The OIDC service responded with an error: '%s'.\n"
+ + "Error description: '%s'",
+ getError(context).orElse(""), getErrorDescription(context).orElse("")));
+ }
- public static Optional<String> getError(final PlayWebContext context) {
- return context.getRequestParameter(ERROR_FIELD_NAME);
- }
+ public static boolean isError(final PlayWebContext context) {
+ return getError(context).isPresent() && !getError(context).get().isEmpty();
+ }
- public static Optional<String> getErrorDescription(final PlayWebContext context) {
- return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME);
- }
+ public static Optional<String> getError(final PlayWebContext context) {
+ return context.getRequestParameter(ERROR_FIELD_NAME);
+ }
+
+ public static Optional<String> getErrorDescription(final PlayWebContext context) {
+ return context.getRequestParameter(ERROR_DESCRIPTION_FIELD_NAME);
+ }
}
diff --git a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java
index 8c8c250fb7e639..01f8f16171d133 100644
--- a/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java
+++ b/datahub-frontend/app/auth/sso/oidc/custom/CustomOidcAuthenticator.java
@@ -1,8 +1,8 @@
package auth.sso.oidc.custom;
-import com.nimbusds.oauth2.sdk.AuthorizationGrant;
import com.nimbusds.oauth2.sdk.AuthorizationCode;
import com.nimbusds.oauth2.sdk.AuthorizationCodeGrant;
+import com.nimbusds.oauth2.sdk.AuthorizationGrant;
import com.nimbusds.oauth2.sdk.ParseException;
import com.nimbusds.oauth2.sdk.TokenErrorResponse;
import com.nimbusds.oauth2.sdk.TokenRequest;
@@ -37,7 +37,6 @@
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-
public class CustomOidcAuthenticator implements Authenticator<OidcCredentials> {
private static final Logger logger = LoggerFactory.getLogger(OidcAuthenticator.class);
@@ -61,14 +60,17 @@ public CustomOidcAuthenticator(final OidcClient client) {
this.client = client;
// check authentication methods
- final List<ClientAuthenticationMethod> metadataMethods = configuration.findProviderMetadata().getTokenEndpointAuthMethods();
+ final List<ClientAuthenticationMethod> metadataMethods =
+ configuration.findProviderMetadata().getTokenEndpointAuthMethods();
- final ClientAuthenticationMethod preferredMethod = getPreferredAuthenticationMethod(configuration);
+ final ClientAuthenticationMethod preferredMethod =
+ getPreferredAuthenticationMethod(configuration);
final ClientAuthenticationMethod chosenMethod;
if (CommonHelper.isNotEmpty(metadataMethods)) {
if (preferredMethod != null) {
- if (ClientAuthenticationMethod.NONE.equals(preferredMethod) || metadataMethods.contains(preferredMethod)) {
+ if (ClientAuthenticationMethod.NONE.equals(preferredMethod)
+ || metadataMethods.contains(preferredMethod)) {
chosenMethod = preferredMethod;
} else {
throw new TechnicalException(
@@ -83,8 +85,10 @@ public CustomOidcAuthenticator(final OidcClient client) {
chosenMethod = firstSupportedMethod(metadataMethods);
}
} else {
- chosenMethod = preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault();
- logger.info("Provider metadata does not provide Token endpoint authentication methods. Using: {}",
+ chosenMethod =
+ preferredMethod != null ? preferredMethod : ClientAuthenticationMethod.getDefault();
+ logger.info(
+ "Provider metadata does not provide Token endpoint authentication methods. Using: {}",
chosenMethod);
}
@@ -103,38 +107,41 @@ public CustomOidcAuthenticator(final OidcClient client) {
}
/**
- * The preferred {@link ClientAuthenticationMethod} specified in the given
- * {@link OidcConfiguration}, or <code>null</code> meaning that the a
- * provider-supported method should be chosen.
+ * The preferred {@link ClientAuthenticationMethod} specified in the given {@link
+ * OidcConfiguration}, or <code>null</code> meaning that the a provider-supported method should be
+ * chosen.
*/
- private static ClientAuthenticationMethod getPreferredAuthenticationMethod(OidcConfiguration config) {
+ private static ClientAuthenticationMethod getPreferredAuthenticationMethod(
+ OidcConfiguration config) {
final ClientAuthenticationMethod configurationMethod = config.getClientAuthenticationMethod();
if (configurationMethod == null) {
return null;
}
if (!SUPPORTED_METHODS.contains(configurationMethod)) {
- throw new TechnicalException("Configured authentication method (" + configurationMethod + ") is not supported.");
+ throw new TechnicalException(
+ "Configured authentication method (" + configurationMethod + ") is not supported.");
}
return configurationMethod;
}
/**
- * The first {@link ClientAuthenticationMethod} from the given list of
- * methods that is supported by this implementation.
+ * The first {@link ClientAuthenticationMethod} from the given list of methods that is supported
+ * by this implementation.
*
- * @throws TechnicalException
- * if none of the provider-supported methods is supported.
+ * @throws TechnicalException if none of the provider-supported methods is supported.
*/
- private static ClientAuthenticationMethod firstSupportedMethod(final List<ClientAuthenticationMethod> metadataMethods) {
+ private static ClientAuthenticationMethod firstSupportedMethod(
+ final List<ClientAuthenticationMethod> metadataMethods) {
Optional<ClientAuthenticationMethod> firstSupported =
metadataMethods.stream().filter((m) -> SUPPORTED_METHODS.contains(m)).findFirst();
if (firstSupported.isPresent()) {
return firstSupported.get();
} else {
- throw new TechnicalException("None of the Token endpoint provider metadata authentication methods are supported: "
- + metadataMethods);
+ throw new TechnicalException(
+ "None of the Token endpoint provider metadata authentication methods are supported: "
+ + metadataMethods);
}
}
@@ -145,21 +152,30 @@ public void validate(final OidcCredentials credentials, final WebContext context
if (code != null) {
try {
final String computedCallbackUrl = client.computeFinalCallbackUrl(context);
- CodeVerifier verifier = (CodeVerifier) configuration.getValueRetriever()
- .retrieve(client.getCodeVerifierSessionAttributeName(), client, context).orElse(null);
+ CodeVerifier verifier =
+ (CodeVerifier)
+ configuration
+ .getValueRetriever()
+ .retrieve(client.getCodeVerifierSessionAttributeName(), client, context)
+ .orElse(null);
// Token request
- final TokenRequest request = createTokenRequest(new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier));
+ final TokenRequest request =
+ createTokenRequest(
+ new AuthorizationCodeGrant(code, new URI(computedCallbackUrl), verifier));
HTTPRequest tokenHttpRequest = request.toHTTPRequest();
tokenHttpRequest.setConnectTimeout(configuration.getConnectTimeout());
tokenHttpRequest.setReadTimeout(configuration.getReadTimeout());
final HTTPResponse httpResponse = tokenHttpRequest.send();
- logger.debug("Token response: status={}, content={}", httpResponse.getStatusCode(),
+ logger.debug(
+ "Token response: status={}, content={}",
+ httpResponse.getStatusCode(),
httpResponse.getContent());
final TokenResponse response = OIDCTokenResponseParser.parse(httpResponse);
if (response instanceof TokenErrorResponse) {
- throw new TechnicalException("Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject());
+ throw new TechnicalException(
+ "Bad token response, error=" + ((TokenErrorResponse) response).getErrorObject());
}
logger.debug("Token response successful");
final OIDCTokenResponse tokenSuccessResponse = (OIDCTokenResponse) response;
@@ -178,11 +194,15 @@ public void validate(final OidcCredentials credentials, final WebContext context
private TokenRequest createTokenRequest(final AuthorizationGrant grant) {
if (clientAuthentication != null) {
- return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(),
- this.clientAuthentication, grant);
+ return new TokenRequest(
+ configuration.findProviderMetadata().getTokenEndpointURI(),
+ this.clientAuthentication,
+ grant);
} else {
- return new TokenRequest(configuration.findProviderMetadata().getTokenEndpointURI(),
- new ClientID(configuration.getClientId()), grant);
+ return new TokenRequest(
+ configuration.findProviderMetadata().getTokenEndpointURI(),
+ new ClientID(configuration.getClientId()),
+ grant);
}
}
}
diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java
index 24183f5c625da9..4d40f45cd09b48 100644
--- a/datahub-frontend/app/client/AuthServiceClient.java
+++ b/datahub-frontend/app/client/AuthServiceClient.java
@@ -3,7 +3,6 @@
import com.datahub.authentication.Authentication;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
-
import java.nio.charset.StandardCharsets;
import java.util.Objects;
import javax.annotation.Nonnull;
@@ -17,17 +16,16 @@
import org.apache.http.util.EntityUtils;
import play.mvc.Http;
-
-/**
- * This class is responsible for coordinating authentication with the backend Metadata Service.
- */
+/** This class is responsible for coordinating authentication with the backend Metadata Service. */
@Slf4j
public class AuthServiceClient {
private static final String GENERATE_SESSION_TOKEN_ENDPOINT = "auth/generateSessionTokenForUser";
private static final String SIGN_UP_ENDPOINT = "auth/signUp";
- private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials";
- private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials";
+ private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT =
+ "auth/resetNativeUserCredentials";
+ private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT =
+ "auth/verifyNativeUserCredentials";
private static final String TRACK_ENDPOINT = "auth/track";
private static final String ACCESS_TOKEN_FIELD = "accessToken";
private static final String USER_ID_FIELD = "userId";
@@ -39,7 +37,8 @@ public class AuthServiceClient {
private static final String INVITE_TOKEN_FIELD = "inviteToken";
private static final String RESET_TOKEN_FIELD = "resetToken";
private static final String IS_NATIVE_USER_CREATED_FIELD = "isNativeUserCreated";
- private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD = "areNativeUserCredentialsReset";
+ private static final String ARE_NATIVE_USER_CREDENTIALS_RESET_FIELD =
+ "areNativeUserCredentialsReset";
private static final String DOES_PASSWORD_MATCH_FIELD = "doesPasswordMatch";
private final String metadataServiceHost;
@@ -48,8 +47,11 @@ public class AuthServiceClient {
private final Authentication systemAuthentication;
private final CloseableHttpClient httpClient;
- public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort,
- @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication,
+ public AuthServiceClient(
+ @Nonnull final String metadataServiceHost,
+ @Nonnull final Integer metadataServicePort,
+ @Nonnull final Boolean useSsl,
+ @Nonnull final Authentication systemAuthentication,
@Nonnull final CloseableHttpClient httpClient) {
this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost);
this.metadataServicePort = Objects.requireNonNull(metadataServicePort);
@@ -59,10 +61,11 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin
}
/**
- * Call the Auth Service to generate a session token for a particular user with a unique actor id, or throws an exception if generation fails.
+ * Call the Auth Service to generate a session token for a particular user with a unique actor id,
+ * or throws an exception if generation fails.
*
- * Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of an Actor of type
- * USER.
+ * <p>Notice that the "userId" parameter should NOT be of type "urn", but rather the unique id of
+ * an Actor of type USER.
*/
@Nonnull
public String generateSessionTokenForUser(@Nonnull final String userId) {
@@ -72,15 +75,21 @@ public String generateSessionTokenForUser(@Nonnull final String userId) {
try {
final String protocol = this.metadataServiceUseSsl ? "https" : "http";
- final HttpPost request = new HttpPost(
- String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort,
- GENERATE_SESSION_TOKEN_ENDPOINT));
+ final HttpPost request =
+ new HttpPost(
+ String.format(
+ "%s://%s:%s/%s",
+ protocol,
+ this.metadataServiceHost,
+ this.metadataServicePort,
+ GENERATE_SESSION_TOKEN_ENDPOINT));
// Build JSON request to generate a token on behalf of a user.
final ObjectMapper objectMapper = new ObjectMapper();
final ObjectNode objectNode = objectMapper.createObjectNode();
objectNode.put(USER_ID_FIELD, userId);
- final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
+ final String json =
+ objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
request.setEntity(new StringEntity(json, StandardCharsets.UTF_8));
// Add authorization header with DataHub frontend system id and secret.
@@ -94,7 +103,8 @@ public String generateSessionTokenForUser(@Nonnull final String userId) {
return getAccessTokenFromJson(jsonStr);
} else {
throw new RuntimeException(
- String.format("Bad response from the Metadata Service: %s %s",
+ String.format(
+ "Bad response from the Metadata Service: %s %s",
response.getStatusLine().toString(), response.getEntity().toString()));
}
} catch (Exception e) {
@@ -110,11 +120,14 @@ public String generateSessionTokenForUser(@Nonnull final String userId) {
}
}
- /**
- * Call the Auth Service to create a native Datahub user.
- */
- public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullName, @Nonnull final String email,
- @Nonnull final String title, @Nonnull final String password, @Nonnull final String inviteToken) {
+ /** Call the Auth Service to create a native Datahub user. */
+ public boolean signUp(
+ @Nonnull final String userUrn,
+ @Nonnull final String fullName,
+ @Nonnull final String email,
+ @Nonnull final String title,
+ @Nonnull final String password,
+ @Nonnull final String inviteToken) {
Objects.requireNonNull(userUrn, "userUrn must not be null");
Objects.requireNonNull(fullName, "fullName must not be null");
Objects.requireNonNull(email, "email must not be null");
@@ -126,9 +139,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN
try {
final String protocol = this.metadataServiceUseSsl ? "https" : "http";
- final HttpPost request = new HttpPost(
- String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort,
- SIGN_UP_ENDPOINT));
+ final HttpPost request =
+ new HttpPost(
+ String.format(
+ "%s://%s:%s/%s",
+ protocol, this.metadataServiceHost, this.metadataServicePort, SIGN_UP_ENDPOINT));
// Build JSON request to sign up a native user.
final ObjectMapper objectMapper = new ObjectMapper();
@@ -139,7 +154,8 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN
objectNode.put(TITLE_FIELD, title);
objectNode.put(PASSWORD_FIELD, password);
objectNode.put(INVITE_TOKEN_FIELD, inviteToken);
- final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
+ final String json =
+ objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
request.setEntity(new StringEntity(json, StandardCharsets.UTF_8));
// Add authorization header with DataHub frontend system id and secret.
@@ -152,11 +168,15 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN
final String jsonStr = EntityUtils.toString(entity);
return getIsNativeUserCreatedFromJson(jsonStr);
} else {
- String content = response.getEntity().getContent() == null ? "" : new String(
- response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8);
+ String content =
+ response.getEntity().getContent() == null
+ ? ""
+ : new String(
+ response.getEntity().getContent().readAllBytes(), StandardCharsets.UTF_8);
throw new RuntimeException(
- String.format("Bad response from the Metadata Service: %s %s Body: %s", response.getStatusLine().toString(),
- response.getEntity().toString(), content));
+ String.format(
+ "Bad response from the Metadata Service: %s %s Body: %s",
+ response.getStatusLine().toString(), response.getEntity().toString(), content));
}
} catch (Exception e) {
throw new RuntimeException(String.format("Failed to create user %s", userUrn), e);
@@ -171,10 +191,10 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN
}
}
- /**
- * Call the Auth Service to reset credentials for a native DataHub user.
- */
- public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password,
+ /** Call the Auth Service to reset credentials for a native DataHub user. */
+ public boolean resetNativeUserCredentials(
+ @Nonnull final String userUrn,
+ @Nonnull final String password,
@Nonnull final String resetToken) {
Objects.requireNonNull(userUrn, "userUrn must not be null");
Objects.requireNonNull(password, "password must not be null");
@@ -184,9 +204,14 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul
try {
final String protocol = this.metadataServiceUseSsl ? "https" : "http";
- final HttpPost request = new HttpPost(
- String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort,
- RESET_NATIVE_USER_CREDENTIALS_ENDPOINT));
+ final HttpPost request =
+ new HttpPost(
+ String.format(
+ "%s://%s:%s/%s",
+ protocol,
+ this.metadataServiceHost,
+ this.metadataServicePort,
+ RESET_NATIVE_USER_CREDENTIALS_ENDPOINT));
// Build JSON request to verify credentials for a native user.
final ObjectMapper objectMapper = new ObjectMapper();
@@ -194,7 +219,8 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul
objectNode.put(USER_URN_FIELD, userUrn);
objectNode.put(PASSWORD_FIELD, password);
objectNode.put(RESET_TOKEN_FIELD, resetToken);
- final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
+ final String json =
+ objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
request.setEntity(new StringEntity(json, StandardCharsets.UTF_8));
// Add authorization header with DataHub frontend system id and secret.
@@ -208,8 +234,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul
return getAreNativeUserCredentialsResetFromJson(jsonStr);
} else {
throw new RuntimeException(
- String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(),
- response.getEntity().toString()));
+ String.format(
+ "Bad response from the Metadata Service: %s %s",
+ response.getStatusLine().toString(), response.getEntity().toString()));
}
} catch (Exception e) {
throw new RuntimeException("Failed to reset credentials for user", e);
@@ -224,10 +251,9 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul
}
}
- /**
- * Call the Auth Service to verify the credentials for a native Datahub user.
- */
- public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) {
+ /** Call the Auth Service to verify the credentials for a native Datahub user. */
+ public boolean verifyNativeUserCredentials(
+ @Nonnull final String userUrn, @Nonnull final String password) {
Objects.requireNonNull(userUrn, "userUrn must not be null");
Objects.requireNonNull(password, "password must not be null");
CloseableHttpResponse response = null;
@@ -235,16 +261,22 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu
try {
final String protocol = this.metadataServiceUseSsl ? "https" : "http";
- final HttpPost request = new HttpPost(
- String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort,
- VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT));
+ final HttpPost request =
+ new HttpPost(
+ String.format(
+ "%s://%s:%s/%s",
+ protocol,
+ this.metadataServiceHost,
+ this.metadataServicePort,
+ VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT));
// Build JSON request to verify credentials for a native user.
final ObjectMapper objectMapper = new ObjectMapper();
final ObjectNode objectNode = objectMapper.createObjectNode();
objectNode.put(USER_URN_FIELD, userUrn);
objectNode.put(PASSWORD_FIELD, password);
- final String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
+ final String json =
+ objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(objectNode);
request.setEntity(new StringEntity(json, StandardCharsets.UTF_8));
// Add authorization header with DataHub frontend system id and secret.
@@ -258,8 +290,9 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu
return getDoesPasswordMatchFromJson(jsonStr);
} else {
throw new RuntimeException(
- String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(),
- response.getEntity().toString()));
+ String.format(
+ "Bad response from the Metadata Service: %s %s",
+ response.getStatusLine().toString(), response.getEntity().toString()));
}
} catch (Exception e) {
throw new RuntimeException("Failed to verify credentials for user", e);
@@ -274,18 +307,18 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu
}
}
- /**
- * Call the Auth Service to track an analytics event
- */
+ /** Call the Auth Service to track an analytics event */
public void track(@Nonnull final String event) {
Objects.requireNonNull(event, "event must not be null");
CloseableHttpResponse response = null;
try {
final String protocol = this.metadataServiceUseSsl ? "https" : "http";
- final HttpPost request = new HttpPost(
- String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort,
- TRACK_ENDPOINT));
+ final HttpPost request =
+ new HttpPost(
+ String.format(
+ "%s://%s:%s/%s",
+ protocol, this.metadataServiceHost, this.metadataServicePort, TRACK_ENDPOINT));
// Build JSON request to track event.
request.setEntity(new StringEntity(event, StandardCharsets.UTF_8));
@@ -298,8 +331,9 @@ public void track(@Nonnull final String event) {
if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) {
throw new RuntimeException(
- String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(),
- response.getEntity().toString()));
+ String.format(
+ "Bad response from the Metadata Service: %s %s",
+ response.getStatusLine().toString(), response.getEntity().toString()));
}
} catch (Exception e) {
throw new RuntimeException("Failed to track event", e);
diff --git a/datahub-frontend/app/client/KafkaTrackingProducer.java b/datahub-frontend/app/client/KafkaTrackingProducer.java
index 59e91a6d5a0f7f..b7173684b63500 100644
--- a/datahub-frontend/app/client/KafkaTrackingProducer.java
+++ b/datahub-frontend/app/client/KafkaTrackingProducer.java
@@ -3,6 +3,15 @@
import com.linkedin.metadata.config.kafka.ProducerConfiguration;
import com.typesafe.config.Config;
import config.ConfigurationProvider;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.Properties;
+import java.util.concurrent.CompletableFuture;
+import javax.annotation.Nonnull;
+import javax.inject.Inject;
+import javax.inject.Singleton;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
@@ -15,98 +24,141 @@
import play.api.inject.ApplicationLifecycle;
import utils.ConfigUtil;
-import javax.inject.Inject;
-
-import javax.annotation.Nonnull;
-import javax.inject.Singleton;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Optional;
-import java.util.Properties;
-import java.util.concurrent.CompletableFuture;
-
@Singleton
public class KafkaTrackingProducer {
- private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName());
- private static final List<String> KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList(
- Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(),
- SecurityProtocol.SASL_PLAINTEXT.name()));
-
- private final Boolean _isEnabled;
- private final KafkaProducer<String, String> _producer;
-
- @Inject
- public KafkaTrackingProducer(@Nonnull Config config, ApplicationLifecycle lifecycle, final ConfigurationProvider configurationProvider) {
- _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled");
-
- if (_isEnabled) {
- _logger.debug("Analytics tracking is enabled");
- _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer());
-
- lifecycle.addStopHook(
- () -> {
- _producer.flush();
- _producer.close();
- return CompletableFuture.completedFuture(null);
- });
- } else {
- _logger.debug("Analytics tracking is disabled");
- _producer = null;
- }
- }
-
- public Boolean isEnabled() {
- return _isEnabled;
+ private final Logger _logger = LoggerFactory.getLogger(KafkaTrackingProducer.class.getName());
+ private static final List<String> KAFKA_SSL_PROTOCOLS =
+ Collections.unmodifiableList(
+ Arrays.asList(
+ SecurityProtocol.SSL.name(),
+ SecurityProtocol.SASL_SSL.name(),
+ SecurityProtocol.SASL_PLAINTEXT.name()));
+
+ private final Boolean _isEnabled;
+ private final KafkaProducer<String, String> _producer;
+
+ @Inject
+ public KafkaTrackingProducer(
+ @Nonnull Config config,
+ ApplicationLifecycle lifecycle,
+ final ConfigurationProvider configurationProvider) {
+ _isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled");
+
+ if (_isEnabled) {
+ _logger.debug("Analytics tracking is enabled");
+ _producer = createKafkaProducer(config, configurationProvider.getKafka().getProducer());
+
+ lifecycle.addStopHook(
+ () -> {
+ _producer.flush();
+ _producer.close();
+ return CompletableFuture.completedFuture(null);
+ });
+ } else {
+ _logger.debug("Analytics tracking is disabled");
+ _producer = null;
}
-
- public void send(ProducerRecord<String, String> record) {
- _producer.send(record);
+ }
+
+ public Boolean isEnabled() {
+ return _isEnabled;
+ }
+
+ public void send(ProducerRecord<String, String> record) {
+ _producer.send(record);
+ }
+
+ private static KafkaProducer<String, String> createKafkaProducer(
+ Config config, ProducerConfiguration producerConfiguration) {
+ final Properties props = new Properties();
+ props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend");
+ props.put(
+ ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG,
+ config.getString("analytics.kafka.delivery.timeout.ms"));
+ props.put(
+ ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
+ config.getString("analytics.kafka.bootstrap.server"));
+ props.put(
+ ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
+ "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn.
+ props.put(
+ ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
+ "org.apache.kafka.common.serialization.StringSerializer"); // JSON object.
+ props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize());
+ props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType());
+
+ final String securityProtocolConfig = "analytics.kafka.security.protocol";
+ if (config.hasPath(securityProtocolConfig)
+ && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) {
+ props.put(
+ CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig));
+ setConfig(
+ config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password");
+
+ setConfig(
+ config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type");
+ setConfig(
+ config,
+ props,
+ SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG,
+ "analytics.kafka.ssl.keystore.location");
+ setConfig(
+ config,
+ props,
+ SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG,
+ "analytics.kafka.ssl.keystore.password");
+
+ setConfig(
+ config,
+ props,
+ SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG,
+ "analytics.kafka.ssl.truststore.type");
+ setConfig(
+ config,
+ props,
+ SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG,
+ "analytics.kafka.ssl.truststore.location");
+ setConfig(
+ config,
+ props,
+ SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG,
+ "analytics.kafka.ssl.truststore.password");
+
+ setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol");
+ setConfig(
+ config,
+ props,
+ SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG,
+ "analytics.kafka.ssl.endpoint.identification.algorithm");
+
+ final String securityProtocol = config.getString(securityProtocolConfig);
+ if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name())
+ || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) {
+ setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism");
+ setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config");
+ setConfig(
+ config,
+ props,
+ SaslConfigs.SASL_KERBEROS_SERVICE_NAME,
+ "analytics.kafka.sasl.kerberos.service.name");
+ setConfig(
+ config,
+ props,
+ SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS,
+ "analytics.kafka.sasl.login.callback.handler.class");
+ setConfig(
+ config,
+ props,
+ SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS,
+ "analytics.kafka.sasl.client.callback.handler.class");
+ }
}
- private static KafkaProducer createKafkaProducer(Config config, ProducerConfiguration producerConfiguration) {
- final Properties props = new Properties();
- props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend");
- props.put(ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG, config.getString("analytics.kafka.delivery.timeout.ms"));
- props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getString("analytics.kafka.bootstrap.server"));
- props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // Actor urn.
- props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer"); // JSON object.
- props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, producerConfiguration.getMaxRequestSize());
- props.put(ProducerConfig.COMPRESSION_TYPE_CONFIG, producerConfiguration.getCompressionType());
-
- final String securityProtocolConfig = "analytics.kafka.security.protocol";
- if (config.hasPath(securityProtocolConfig)
- && KAFKA_SSL_PROTOCOLS.contains(config.getString(securityProtocolConfig))) {
- props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, config.getString(securityProtocolConfig));
- setConfig(config, props, SslConfigs.SSL_KEY_PASSWORD_CONFIG, "analytics.kafka.ssl.key.password");
-
- setConfig(config, props, SslConfigs.SSL_KEYSTORE_TYPE_CONFIG, "analytics.kafka.ssl.keystore.type");
- setConfig(config, props, SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.keystore.location");
- setConfig(config, props, SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.keystore.password");
-
- setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "analytics.kafka.ssl.truststore.type");
- setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, "analytics.kafka.ssl.truststore.location");
- setConfig(config, props, SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "analytics.kafka.ssl.truststore.password");
-
- setConfig(config, props, SslConfigs.SSL_PROTOCOL_CONFIG, "analytics.kafka.ssl.protocol");
- setConfig(config, props, SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, "analytics.kafka.ssl.endpoint.identification.algorithm");
-
- final String securityProtocol = config.getString(securityProtocolConfig);
- if (securityProtocol.equals(SecurityProtocol.SASL_SSL.name())
- || securityProtocol.equals(SecurityProtocol.SASL_PLAINTEXT.name())) {
- setConfig(config, props, SaslConfigs.SASL_MECHANISM, "analytics.kafka.sasl.mechanism");
- setConfig(config, props, SaslConfigs.SASL_JAAS_CONFIG, "analytics.kafka.sasl.jaas.config");
- setConfig(config, props, SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "analytics.kafka.sasl.kerberos.service.name");
- setConfig(config, props, SaslConfigs.SASL_LOGIN_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.login.callback.handler.class");
- setConfig(config, props, SaslConfigs.SASL_CLIENT_CALLBACK_HANDLER_CLASS, "analytics.kafka.sasl.client.callback.handler.class");
- }
- }
-
- return new org.apache.kafka.clients.producer.KafkaProducer(props);
- }
+ return new org.apache.kafka.clients.producer.KafkaProducer<String, String>(props);
+ }
- private static void setConfig(Config config, Properties props, String key, String configKey) {
- Optional.ofNullable(ConfigUtil.getString(config, configKey, null))
- .ifPresent(v -> props.put(key, v));
- }
+ private static void setConfig(Config config, Properties props, String key, String configKey) {
+ Optional.ofNullable(ConfigUtil.getString(config, configKey, null))
+ .ifPresent(v -> props.put(key, v));
+ }
}
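For orientation (not part of the patch): a minimal, self-contained sketch of how the `analytics.kafka.*` keys consumed by `createKafkaProducer` above end up in producer `Properties`. The class name and example config values are illustrative; only the standard Kafka client and Typesafe Config APIs shown are assumed.

```java
import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;

/** Illustrative sketch: mirrors how analytics.kafka.* keys become producer Properties. */
public class TrackingProducerConfigSketch {

  public static void main(String[] args) {
    // Hypothetical values; in datahub-frontend these come from application.conf.
    Config config =
        ConfigFactory.parseMap(
            Map.of(
                "analytics.kafka.bootstrap.server", "localhost:9092",
                "analytics.kafka.delivery.timeout.ms", "30000"));

    Properties props = new Properties();
    props.put(ProducerConfig.CLIENT_ID_CONFIG, "datahub-frontend");
    props.put(
        ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
        config.getString("analytics.kafka.bootstrap.server"));
    props.put(
        ProducerConfig.DELIVERY_TIMEOUT_MS_CONFIG,
        config.getString("analytics.kafka.delivery.timeout.ms"));
    // Key (actor urn) and value (JSON event) are both plain strings.
    props.put(
        ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");
    props.put(
        ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
        "org.apache.kafka.common.serialization.StringSerializer");

    // Optional keys are only copied when present, mirroring setConfig(...) in the patch.
    setIfPresent(
        config, props, CommonClientConfigs.SECURITY_PROTOCOL_CONFIG,
        "analytics.kafka.security.protocol");

    System.out.println(props);
  }

  private static void setIfPresent(Config config, Properties props, String key, String path) {
    Optional.ofNullable(config.hasPath(path) ? config.getString(path) : null)
        .ifPresent(v -> props.put(key, v));
  }
}
```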
diff --git a/datahub-frontend/app/config/ConfigurationProvider.java b/datahub-frontend/app/config/ConfigurationProvider.java
index 8f526c831b5c9b..3d87267f8ebe38 100644
--- a/datahub-frontend/app/config/ConfigurationProvider.java
+++ b/datahub-frontend/app/config/ConfigurationProvider.java
@@ -4,28 +4,22 @@
import com.linkedin.metadata.config.kafka.KafkaConfiguration;
import com.linkedin.metadata.spring.YamlPropertySourceFactory;
import lombok.Data;
-
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.PropertySource;
-
/**
- * Minimal sharing between metadata-service and frontend
- * Does not use the factories module to avoid transitive dependencies.
+ * Minimal sharing between metadata-service and frontend Does not use the factories module to avoid
+ * transitive dependencies.
*/
@EnableConfigurationProperties
@PropertySource(value = "application.yml", factory = YamlPropertySourceFactory.class)
@ConfigurationProperties
@Data
public class ConfigurationProvider {
- /**
- * Kafka related configs.
- */
- private KafkaConfiguration kafka;
+ /** Kafka related configs. */
+ private KafkaConfiguration kafka;
- /**
- * Configuration for caching
- */
- private CacheConfiguration cache;
+ /** Configuration for caching */
+ private CacheConfiguration cache;
}
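As a usage sketch only: the Lombok-generated getters on `ConfigurationProvider` (`getKafka()` and `getProducer()`, both visible in the KafkaTrackingProducer hunk above) are how the frontend reads these shared settings; the wrapper class below is hypothetical.

```java
import config.ConfigurationProvider;

/** Hypothetical helper: summarizes the producer limits bound from application.yml. */
public class ProducerLimitsSummary {

  private final ConfigurationProvider configurationProvider;

  public ProducerLimitsSummary(ConfigurationProvider configurationProvider) {
    this.configurationProvider = configurationProvider;
  }

  public String describe() {
    // getKafka()/getProducer()/getMaxRequestSize()/getCompressionType() appear in the patch above.
    var producer = configurationProvider.getKafka().getProducer();
    return "maxRequestSize=" + producer.getMaxRequestSize()
        + ", compressionType=" + producer.getCompressionType();
  }
}
```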
diff --git a/datahub-frontend/app/controllers/Application.java b/datahub-frontend/app/controllers/Application.java
index 5c76f2572a9360..60971bf06e27bd 100644
--- a/datahub-frontend/app/controllers/Application.java
+++ b/datahub-frontend/app/controllers/Application.java
@@ -1,5 +1,8 @@
package controllers;
+import static auth.AuthUtils.ACTOR;
+import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME;
+
import akka.actor.ActorSystem;
import akka.stream.ActorMaterializer;
import akka.stream.Materializer;
@@ -9,41 +12,35 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.linkedin.util.Pair;
import com.typesafe.config.Config;
-
+import java.io.InputStream;
+import java.time.Duration;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
-
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import play.Environment;
import play.http.HttpEntity;
+import play.libs.Json;
import play.libs.ws.InMemoryBodyWritable;
import play.libs.ws.StandaloneWSClient;
-import play.libs.Json;
import play.libs.ws.ahc.StandaloneAhcWSClient;
import play.mvc.Controller;
import play.mvc.Http;
import play.mvc.ResponseHeader;
import play.mvc.Result;
-import javax.annotation.Nonnull;
-import javax.annotation.Nullable;
-import javax.inject.Inject;
-import java.io.InputStream;
import play.mvc.Security;
import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient;
import play.shaded.ahc.org.asynchttpclient.AsyncHttpClientConfig;
import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClient;
import play.shaded.ahc.org.asynchttpclient.DefaultAsyncHttpClientConfig;
import utils.ConfigUtil;
-import java.time.Duration;
-
-import static auth.AuthUtils.ACTOR;
-import static auth.AuthUtils.SESSION_COOKIE_GMS_TOKEN_NAME;
-
public class Application extends Controller {
private final Logger _logger = LoggerFactory.getLogger(Application.class.getName());
@@ -61,22 +58,17 @@ public Application(Environment environment, @Nonnull Config config) {
/**
* Serves the build output index.html for any given path
*
- * @param path takes a path string, which essentially is ignored
- * routing is managed client side
+ * @param path takes a path string, which essentially is ignored routing is managed client side
* @return {Result} build output index.html resource
*/
@Nonnull
private Result serveAsset(@Nullable String path) {
try {
InputStream indexHtml = _environment.resourceAsStream("public/index.html");
- return ok(indexHtml)
- .withHeader("Cache-Control", "no-cache")
- .as("text/html");
+ return ok(indexHtml).withHeader("Cache-Control", "no-cache").as("text/html");
} catch (Exception e) {
_logger.warn("Cannot load public/index.html resource. Static assets or assets jar missing?");
- return notFound()
- .withHeader("Cache-Control", "no-cache")
- .as("text/html");
+ return notFound().withHeader("Cache-Control", "no-cache").as("text/html");
}
}
@@ -99,66 +91,87 @@ public Result index(@Nullable String path) {
/**
* Proxies requests to the Metadata Service
*
- * TODO: Investigate using mutual SSL authentication to call Metadata Service.
+ * <p>TODO: Investigate using mutual SSL authentication to call Metadata Service.
*/
@Security.Authenticated(Authenticator.class)
- public CompletableFuture proxy(String path, Http.Request request) throws ExecutionException, InterruptedException {
+ public CompletableFuture<Result> proxy(String path, Http.Request request)
+ throws ExecutionException, InterruptedException {
final String authorizationHeaderValue = getAuthorizationHeaderValueToProxy(request);
final String resolvedUri = mapPath(request.uri());
- final String metadataServiceHost = ConfigUtil.getString(
- _config,
- ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH,
- ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
- final int metadataServicePort = ConfigUtil.getInt(
- _config,
- ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
- ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
- final boolean metadataServiceUseSsl = ConfigUtil.getBoolean(
- _config,
- ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
- ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL
- );
+ final String metadataServiceHost =
+ ConfigUtil.getString(
+ _config,
+ ConfigUtil.METADATA_SERVICE_HOST_CONFIG_PATH,
+ ConfigUtil.DEFAULT_METADATA_SERVICE_HOST);
+ final int metadataServicePort =
+ ConfigUtil.getInt(
+ _config,
+ ConfigUtil.METADATA_SERVICE_PORT_CONFIG_PATH,
+ ConfigUtil.DEFAULT_METADATA_SERVICE_PORT);
+ final boolean metadataServiceUseSsl =
+ ConfigUtil.getBoolean(
+ _config,
+ ConfigUtil.METADATA_SERVICE_USE_SSL_CONFIG_PATH,
+ ConfigUtil.DEFAULT_METADATA_SERVICE_USE_SSL);
// TODO: Fully support custom internal SSL.
final String protocol = metadataServiceUseSsl ? "https" : "http";
    final Map<String, List<String>> headers = request.getHeaders().toMap();
- if (headers.containsKey(Http.HeaderNames.HOST) && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) {
- headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST));
+ if (headers.containsKey(Http.HeaderNames.HOST)
+ && !headers.containsKey(Http.HeaderNames.X_FORWARDED_HOST)) {
+ headers.put(Http.HeaderNames.X_FORWARDED_HOST, headers.get(Http.HeaderNames.HOST));
}
- return _ws.url(String.format("%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri))
+ return _ws.url(
+ String.format(
+ "%s://%s:%s%s", protocol, metadataServiceHost, metadataServicePort, resolvedUri))
.setMethod(request.method())
- .setHeaders(headers
- .entrySet()
- .stream()
- // Remove X-DataHub-Actor to prevent malicious delegation.
- .filter(entry -> !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(entry.getKey()))
- .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey()))
- .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey()))
- .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey()))
- // Remove Host s.th. service meshes do not route to wrong host
- .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey()))
- .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue))
- )
+ .setHeaders(
+ headers.entrySet().stream()
+ // Remove X-DataHub-Actor to prevent malicious delegation.
+ .filter(
+ entry ->
+ !AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER.equalsIgnoreCase(
+ entry.getKey()))
+ .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey()))
+ .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey()))
+ .filter(entry -> !Http.HeaderNames.AUTHORIZATION.equalsIgnoreCase(entry.getKey()))
+ // Remove Host s.th. service meshes do not route to wrong host
+ .filter(entry -> !Http.HeaderNames.HOST.equalsIgnoreCase(entry.getKey()))
+ .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)))
.addHeader(Http.HeaderNames.AUTHORIZATION, authorizationHeaderValue)
- .addHeader(AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request))
- .setBody(new InMemoryBodyWritable(ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()), "application/json"))
+ .addHeader(
+ AuthenticationConstants.LEGACY_X_DATAHUB_ACTOR_HEADER, getDataHubActorHeader(request))
+ .setBody(
+ new InMemoryBodyWritable(
+ ByteString.fromByteBuffer(request.body().asBytes().asByteBuffer()),
+ "application/json"))
.setRequestTimeout(Duration.ofSeconds(120))
.execute()
- .thenApply(apiResponse -> {
- final ResponseHeader header = new ResponseHeader(apiResponse.getStatus(), apiResponse.getHeaders()
- .entrySet()
- .stream()
- .filter(entry -> !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey()))
- .filter(entry -> !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey()))
- .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue())))
- .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)));
- final HttpEntity body = new HttpEntity.Strict(apiResponse.getBodyAsBytes(), Optional.ofNullable(apiResponse.getContentType()));
- return new Result(header, body);
- }).toCompletableFuture();
+ .thenApply(
+ apiResponse -> {
+ final ResponseHeader header =
+ new ResponseHeader(
+ apiResponse.getStatus(),
+ apiResponse.getHeaders().entrySet().stream()
+ .filter(
+ entry ->
+ !Http.HeaderNames.CONTENT_LENGTH.equalsIgnoreCase(entry.getKey()))
+ .filter(
+ entry ->
+ !Http.HeaderNames.CONTENT_TYPE.equalsIgnoreCase(entry.getKey()))
+ .map(entry -> Pair.of(entry.getKey(), String.join(";", entry.getValue())))
+ .collect(Collectors.toMap(Pair::getFirst, Pair::getSecond)));
+ final HttpEntity body =
+ new HttpEntity.Strict(
+ apiResponse.getBodyAsBytes(),
+ Optional.ofNullable(apiResponse.getContentType()));
+ return new Result(header, body);
+ })
+ .toCompletableFuture();
}
/**
@@ -173,11 +186,13 @@ public Result appConfig() {
config.put("appVersion", _config.getString("app.version"));
config.put("isInternal", _config.getBoolean("linkedin.internal"));
config.put("shouldShowDatasetLineage", _config.getBoolean("linkedin.show.dataset.lineage"));
- config.put("suggestionConfidenceThreshold",
+ config.put(
+ "suggestionConfidenceThreshold",
Integer.valueOf(_config.getString("linkedin.suggestion.confidence.threshold")));
config.set("wikiLinks", wikiLinks());
config.set("tracking", trackingInfo());
- // In a staging environment, we can trigger this flag to be true so that the UI can handle based on
+ // In a staging environment, we can trigger this flag to be true so that the UI can handle based
+ // on
// such config and alert users that their changes will not affect production data
config.put("isStagingBanner", _config.getBoolean("ui.show.staging.banner"));
config.put("isLiveDataWarning", _config.getBoolean("ui.show.live.data.banner"));
@@ -206,6 +221,7 @@ public Result appConfig() {
/**
* Creates a JSON object of profile / avatar properties
+ *
* @return Json avatar / profile image properties
*/
@Nonnull
@@ -273,23 +289,26 @@ private StandaloneWSClient createWsClient() {
}
/**
- * Returns the value of the Authorization Header to be provided when proxying requests to the downstream Metadata Service.
+ * Returns the value of the Authorization Header to be provided when proxying requests to the
+ * downstream Metadata Service.
*
- * Currently, the Authorization header value may be derived from
+ * <p>Currently, the Authorization header value may be derived from
*
- * a) The value of the "token" attribute of the Session Cookie provided by the client. This value is set
- * when creating the session token initially from a token granted by the Metadata Service.
+ * <p>a) The value of the "token" attribute of the Session Cookie provided by the client. This
+ * value is set when creating the session token initially from a token granted by the Metadata
+ * Service.
*
- * Or if the "token" attribute cannot be found in a session cookie, then we fallback to
+ * <p>Or if the "token" attribute cannot be found in a session cookie, then we fallback to
*
- * b) The value of the Authorization
- * header provided in the original request. This will be used in cases where clients are making programmatic requests
- * to Metadata Service APIs directly, without providing a session cookie (ui only).
+ * <p>b) The value of the Authorization header provided in the original request. This will be used
+ * in cases where clients are making programmatic requests to Metadata Service APIs directly,
+ * without providing a session cookie (ui only).
*
- * If neither are found, an empty string is returned.
+ * <p>If neither are found, an empty string is returned.
*/
private String getAuthorizationHeaderValueToProxy(Http.Request request) {
- // If the session cookie has an authorization token, use that. If there's an authorization header provided, simply
+ // If the session cookie has an authorization token, use that. If there's an authorization
+ // header provided, simply
// use that.
String value = "";
if (request.session().data().containsKey(SESSION_COOKIE_GMS_TOKEN_NAME)) {
@@ -301,11 +320,13 @@ private String getAuthorizationHeaderValueToProxy(Http.Request request) {
}
/**
- * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This is sent along
- * with any requests that have a valid frontend session cookie to identify the calling actor, for backwards compatibility.
+ * Returns the value of the legacy X-DataHub-Actor header to forward to the Metadata Service. This
+ * is sent along with any requests that have a valid frontend session cookie to identify the
+ * calling actor, for backwards compatibility.
*
- * If Metadata Service authentication is enabled, this value is not required because Actor context will most often come
- * from the authentication credentials provided in the Authorization header.
+ * <p>If Metadata Service authentication is enabled, this value is not required because Actor
+ * context will most often come from the authentication credentials provided in the Authorization
+ * header.
*/
private String getDataHubActorHeader(Http.Request request) {
String actor = request.session().data().get(ACTOR);
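The reflowed Javadoc above describes a two-step fallback for the proxied Authorization header. A framework-free sketch of that ordering follows; the `"token"` session key and the `Bearer` prefix are assumptions standing in for `SESSION_COOKIE_GMS_TOKEN_NAME` and the real cookie handling in the controller.

```java
import java.util.Map;
import java.util.Optional;

/** Illustrative only: session-cookie token first, then the request's Authorization header. */
public final class AuthHeaderResolutionSketch {

  private AuthHeaderResolutionSketch() {}

  static String resolve(Map<String, String> sessionData, Optional<String> authorizationHeader) {
    String sessionToken = sessionData.get("token"); // stands in for SESSION_COOKIE_GMS_TOKEN_NAME
    if (sessionToken != null) {
      return "Bearer " + sessionToken; // a) token from the session cookie
    }
    return authorizationHeader.orElse(""); // b) header from the original request, else empty
  }

  public static void main(String[] args) {
    System.out.println(resolve(Map.of("token", "abc123"), Optional.empty()));  // Bearer abc123
    System.out.println(resolve(Map.of(), Optional.of("Bearer xyz")));          // Bearer xyz
    System.out.println("[" + resolve(Map.of(), Optional.empty()) + "]");       // []
  }
}
```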
diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java
index e28d4ba2ee37eb..9c232e965a0034 100644
--- a/datahub-frontend/app/controllers/AuthenticationController.java
+++ b/datahub-frontend/app/controllers/AuthenticationController.java
@@ -1,5 +1,9 @@
package controllers;
+import static auth.AuthUtils.*;
+import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX;
+import static org.pac4j.play.store.PlayCookieSessionStore.*;
+
import auth.AuthUtils;
import auth.CookieConfigs;
import auth.JAASConfigs;
@@ -35,325 +39,337 @@
import play.mvc.Results;
import security.AuthenticationManager;
-import static auth.AuthUtils.*;
-import static org.pac4j.core.client.IndirectClient.ATTEMPTED_AUTHENTICATION_SUFFIX;
-import static org.pac4j.play.store.PlayCookieSessionStore.*;
-
-
// TODO add logging.
public class AuthenticationController extends Controller {
- public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging";
- private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri";
- private static final String ERROR_MESSAGE_URI_PARAM = "error_msg";
- private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured";
-
- private static final String SSO_NO_REDIRECT_MESSAGE = "SSO is configured, however missing redirect from idp";
-
- private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName());
- private final CookieConfigs _cookieConfigs;
- private final JAASConfigs _jaasConfigs;
- private final NativeAuthenticationConfigs _nativeAuthenticationConfigs;
- private final boolean _verbose;
-
- @Inject
- private org.pac4j.core.config.Config _ssoConfig;
-
- @Inject
- private PlaySessionStore _playSessionStore;
-
- @Inject
- private SsoManager _ssoManager;
-
- @Inject
- AuthServiceClient _authClient;
-
- @Inject
- public AuthenticationController(@Nonnull Config configs) {
- _cookieConfigs = new CookieConfigs(configs);
- _jaasConfigs = new JAASConfigs(configs);
- _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs);
- _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING);
+ public static final String AUTH_VERBOSE_LOGGING = "auth.verbose.logging";
+ private static final String AUTH_REDIRECT_URI_PARAM = "redirect_uri";
+ private static final String ERROR_MESSAGE_URI_PARAM = "error_msg";
+ private static final String SSO_DISABLED_ERROR_MESSAGE = "SSO is not configured";
+
+ private static final String SSO_NO_REDIRECT_MESSAGE =
+ "SSO is configured, however missing redirect from idp";
+
+ private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName());
+ private final CookieConfigs _cookieConfigs;
+ private final JAASConfigs _jaasConfigs;
+ private final NativeAuthenticationConfigs _nativeAuthenticationConfigs;
+ private final boolean _verbose;
+
+ @Inject private org.pac4j.core.config.Config _ssoConfig;
+
+ @Inject private PlaySessionStore _playSessionStore;
+
+ @Inject private SsoManager _ssoManager;
+
+ @Inject AuthServiceClient _authClient;
+
+ @Inject
+ public AuthenticationController(@Nonnull Config configs) {
+ _cookieConfigs = new CookieConfigs(configs);
+ _jaasConfigs = new JAASConfigs(configs);
+ _nativeAuthenticationConfigs = new NativeAuthenticationConfigs(configs);
+ _verbose = configs.hasPath(AUTH_VERBOSE_LOGGING) && configs.getBoolean(AUTH_VERBOSE_LOGGING);
+ }
+
+ /**
+ * Route used to perform authentication, or redirect to log in if authentication fails.
+ *
+ * <p>If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider
+ * (Indirect auth). If not, we will fall back to the default username / password login experience
+ * (Direct auth).
+ */
+ @Nonnull
+ public Result authenticate(Http.Request request) {
+
+ // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is
+ // authenticated.
+
+ final Optional<String> maybeRedirectPath =
+ Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM));
+ final String redirectPath = maybeRedirectPath.orElse("/");
+
+ if (AuthUtils.hasValidSessionCookie(request)) {
+ return Results.redirect(redirectPath);
}
- /**
- * Route used to perform authentication, or redirect to log in if authentication fails.
- *
- * If indirect SSO (eg. oidc) is configured, this route will redirect to the identity provider (Indirect auth).
- * If not, we will fall back to the default username / password login experience (Direct auth).
- */
- @Nonnull
- public Result authenticate(Http.Request request) {
-
- // TODO: Call getAuthenticatedUser and then generate a session cookie for the UI if the user is authenticated.
-
- final Optional maybeRedirectPath = Optional.ofNullable(request.getQueryString(AUTH_REDIRECT_URI_PARAM));
- final String redirectPath = maybeRedirectPath.orElse("/");
-
- if (AuthUtils.hasValidSessionCookie(request)) {
- return Results.redirect(redirectPath);
- }
-
- // 1. If SSO is enabled, redirect to IdP if not authenticated.
- if (_ssoManager.isSsoEnabled()) {
- return redirectToIdentityProvider(request, redirectPath).orElse(
- Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))
- );
- }
-
- // 2. If either JAAS auth or Native auth is enabled, fallback to it
- if (_jaasConfigs.isJAASEnabled() || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) {
- return Results.redirect(
- LOGIN_ROUTE + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath)));
- }
-
- // 3. If no auth enabled, fallback to using default user account & redirect.
- // Generate GMS session token, TODO:
- final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId());
- return Results.redirect(redirectPath).withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken))
- .withCookies(
- createActorCookie(
- DEFAULT_ACTOR_URN.toString(),
- _cookieConfigs.getTtlInHours(),
- _cookieConfigs.getAuthCookieSameSite(),
- _cookieConfigs.getAuthCookieSecure()
- )
- );
+ // 1. If SSO is enabled, redirect to IdP if not authenticated.
+ if (_ssoManager.isSsoEnabled()) {
+ return redirectToIdentityProvider(request, redirectPath)
+ .orElse(
+ Results.redirect(
+ LOGIN_ROUTE
+ + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)));
}
- /**
- * Redirect to the identity provider for authentication.
- */
- @Nonnull
- public Result sso(Http.Request request) {
- if (_ssoManager.isSsoEnabled()) {
- return redirectToIdentityProvider(request, "/").orElse(
- Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE))
- );
- }
- return Results.redirect(LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE));
+ // 2. If either JAAS auth or Native auth is enabled, fallback to it
+ if (_jaasConfigs.isJAASEnabled()
+ || _nativeAuthenticationConfigs.isNativeAuthenticationEnabled()) {
+ return Results.redirect(
+ LOGIN_ROUTE
+ + String.format("?%s=%s", AUTH_REDIRECT_URI_PARAM, encodeRedirectUri(redirectPath)));
}
- /**
- * Log in a user based on a username + password.
- *
- * TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the default.
- */
- @Nonnull
- public Result logIn(Http.Request request) {
- boolean jaasEnabled = _jaasConfigs.isJAASEnabled();
- _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled));
- boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled();
- _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled));
- boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled;
- if (noAuthEnabled) {
- String message = "Neither JAAS nor native authentication is enabled on the server.";
- final ObjectNode error = Json.newObject();
- error.put("message", message);
- return Results.badRequest(error);
- }
-
- final JsonNode json = request.body().asJson();
- final String username = json.findPath(USER_NAME).textValue();
- final String password = json.findPath(PASSWORD).textValue();
-
- if (StringUtils.isBlank(username)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
-
- JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials");
- boolean loginSucceeded = tryLogin(username, password);
-
- if (!loginSucceeded) {
- return Results.badRequest(invalidCredsJson);
- }
-
- final Urn actorUrn = new CorpuserUrn(username);
- final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId());
- return createSession(actorUrn.toString(), accessToken);
+ // 3. If no auth enabled, fallback to using default user account & redirect.
+ // Generate GMS session token, TODO:
+ final String accessToken = _authClient.generateSessionTokenForUser(DEFAULT_ACTOR_URN.getId());
+ return Results.redirect(redirectPath)
+ .withSession(createSessionMap(DEFAULT_ACTOR_URN.toString(), accessToken))
+ .withCookies(
+ createActorCookie(
+ DEFAULT_ACTOR_URN.toString(),
+ _cookieConfigs.getTtlInHours(),
+ _cookieConfigs.getAuthCookieSameSite(),
+ _cookieConfigs.getAuthCookieSecure()));
+ }
+
+ /** Redirect to the identity provider for authentication. */
+ @Nonnull
+ public Result sso(Http.Request request) {
+ if (_ssoManager.isSsoEnabled()) {
+ return redirectToIdentityProvider(request, "/")
+ .orElse(
+ Results.redirect(
+ LOGIN_ROUTE
+ + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_NO_REDIRECT_MESSAGE)));
+ }
+ return Results.redirect(
+ LOGIN_ROUTE + String.format("?%s=%s", ERROR_MESSAGE_URI_PARAM, SSO_DISABLED_ERROR_MESSAGE));
+ }
+
+ /**
+ * Log in a user based on a username + password.
+ *
+ * TODO: Implement built-in support for LDAP auth. Currently dummy jaas authentication is the
+ * default.
+ */
+ @Nonnull
+ public Result logIn(Http.Request request) {
+ boolean jaasEnabled = _jaasConfigs.isJAASEnabled();
+ _logger.debug(String.format("Jaas authentication enabled: %b", jaasEnabled));
+ boolean nativeAuthenticationEnabled =
+ _nativeAuthenticationConfigs.isNativeAuthenticationEnabled();
+ _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled));
+ boolean noAuthEnabled = !jaasEnabled && !nativeAuthenticationEnabled;
+ if (noAuthEnabled) {
+ String message = "Neither JAAS nor native authentication is enabled on the server.";
+ final ObjectNode error = Json.newObject();
+ error.put("message", message);
+ return Results.badRequest(error);
}
- /**
- * Sign up a native user based on a name, email, title, and password. The invite token must match an existing invite token.
- *
- */
- @Nonnull
- public Result signUp(Http.Request request) {
- boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled();
- _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled));
- if (!nativeAuthenticationEnabled) {
- String message = "Native authentication is not enabled on the server.";
- final ObjectNode error = Json.newObject();
- error.put("message", message);
- return Results.badRequest(error);
- }
+ final JsonNode json = request.body().asJson();
+ final String username = json.findPath(USER_NAME).textValue();
+ final String password = json.findPath(PASSWORD).textValue();
- final JsonNode json = request.body().asJson();
- final String fullName = json.findPath(FULL_NAME).textValue();
- final String email = json.findPath(EMAIL).textValue();
- final String title = json.findPath(TITLE).textValue();
- final String password = json.findPath(PASSWORD).textValue();
- final String inviteToken = json.findPath(INVITE_TOKEN).textValue();
+ if (StringUtils.isBlank(username)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "User name must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
- if (StringUtils.isBlank(fullName)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Invalid Credentials");
+ boolean loginSucceeded = tryLogin(username, password);
- if (StringUtils.isBlank(email)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
- if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) {
- Constraints.EmailValidator emailValidator = new Constraints.EmailValidator();
- if (!emailValidator.isValid(email)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
- }
+ if (!loginSucceeded) {
+ return Results.badRequest(invalidCredsJson);
+ }
- if (StringUtils.isBlank(password)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ final Urn actorUrn = new CorpuserUrn(username);
+ final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId());
+ return createSession(actorUrn.toString(), accessToken);
+ }
+
+ /**
+ * Sign up a native user based on a name, email, title, and password. The invite token must match
+ * an existing invite token.
+ */
+ @Nonnull
+ public Result signUp(Http.Request request) {
+ boolean nativeAuthenticationEnabled =
+ _nativeAuthenticationConfigs.isNativeAuthenticationEnabled();
+ _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled));
+ if (!nativeAuthenticationEnabled) {
+ String message = "Native authentication is not enabled on the server.";
+ final ObjectNode error = Json.newObject();
+ error.put("message", message);
+ return Results.badRequest(error);
+ }
- if (StringUtils.isBlank(title)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ final JsonNode json = request.body().asJson();
+ final String fullName = json.findPath(FULL_NAME).textValue();
+ final String email = json.findPath(EMAIL).textValue();
+ final String title = json.findPath(TITLE).textValue();
+ final String password = json.findPath(PASSWORD).textValue();
+ final String inviteToken = json.findPath(INVITE_TOKEN).textValue();
- if (StringUtils.isBlank(inviteToken)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Invite token must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ if (StringUtils.isBlank(fullName)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Full name must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
- final Urn userUrn = new CorpuserUrn(email);
- final String userUrnString = userUrn.toString();
- _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken);
- final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId());
- return createSession(userUrnString, accessToken);
+ if (StringUtils.isBlank(email)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
+ if (_nativeAuthenticationConfigs.isEnforceValidEmailEnabled()) {
+ Constraints.EmailValidator emailValidator = new Constraints.EmailValidator();
+ if (!emailValidator.isValid(email)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
}
- /**
- * Reset a native user's credentials based on a username, old password, and new password.
- *
- */
- @Nonnull
- public Result resetNativeUserCredentials(Http.Request request) {
- boolean nativeAuthenticationEnabled = _nativeAuthenticationConfigs.isNativeAuthenticationEnabled();
- _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled));
- if (!nativeAuthenticationEnabled) {
- String message = "Native authentication is not enabled on the server.";
- final ObjectNode error = Json.newObject();
- error.put("message", message);
- return badRequest(error);
- }
+ if (StringUtils.isBlank(password)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
- final JsonNode json = request.body().asJson();
- final String email = json.findPath(EMAIL).textValue();
- final String password = json.findPath(PASSWORD).textValue();
- final String resetToken = json.findPath(RESET_TOKEN).textValue();
+ if (StringUtils.isBlank(title)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Title must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
- if (StringUtils.isBlank(email)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ if (StringUtils.isBlank(inviteToken)) {
+ JsonNode invalidCredsJson =
+ Json.newObject().put("message", "Invite token must not be empty.");
+ return Results.badRequest(invalidCredsJson);
+ }
- if (StringUtils.isBlank(password)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ final Urn userUrn = new CorpuserUrn(email);
+ final String userUrnString = userUrn.toString();
+ _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken);
+ final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId());
+ return createSession(userUrnString, accessToken);
+ }
+
+ /** Reset a native user's credentials based on a username, old password, and new password. */
+ @Nonnull
+ public Result resetNativeUserCredentials(Http.Request request) {
+ boolean nativeAuthenticationEnabled =
+ _nativeAuthenticationConfigs.isNativeAuthenticationEnabled();
+ _logger.debug(String.format("Native authentication enabled: %b", nativeAuthenticationEnabled));
+ if (!nativeAuthenticationEnabled) {
+ String message = "Native authentication is not enabled on the server.";
+ final ObjectNode error = Json.newObject();
+ error.put("message", message);
+ return badRequest(error);
+ }
- if (StringUtils.isBlank(resetToken)) {
- JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty.");
- return Results.badRequest(invalidCredsJson);
- }
+ final JsonNode json = request.body().asJson();
+ final String email = json.findPath(EMAIL).textValue();
+ final String password = json.findPath(PASSWORD).textValue();
+ final String resetToken = json.findPath(RESET_TOKEN).textValue();
- final Urn userUrn = new CorpuserUrn(email);
- final String userUrnString = userUrn.toString();
- _authClient.resetNativeUserCredentials(userUrnString, password, resetToken);
- final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId());
- return createSession(userUrnString, accessToken);
+ if (StringUtils.isBlank(email)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Email must not be empty.");
+ return Results.badRequest(invalidCredsJson);
}
- private Optional redirectToIdentityProvider(Http.RequestHeader request, String redirectPath) {
- final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore);
- final Client client = _ssoManager.getSsoProvider().client();
- configurePac4jSessionStore(playWebContext, client, redirectPath);
- try {
- final Optional action = client.getRedirectionAction(playWebContext);
- return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext));
- } catch (Exception e) {
- if (_verbose) {
- _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured", e);
- } else {
- _logger.error("Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured");
- }
- return Optional.of(Results.redirect(
- String.format("/login?error_msg=%s",
- URLEncoder.encode("Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, "
- + "or refer to server logs for more information.", StandardCharsets.UTF_8))));
- }
+ if (StringUtils.isBlank(password)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Password must not be empty.");
+ return Results.badRequest(invalidCredsJson);
}
- private void configurePac4jSessionStore(PlayWebContext context, Client client, String redirectPath) {
- // Set the originally requested path for post-auth redirection. We split off into a separate cookie from the session
- // to reduce size of the session cookie
- FoundAction foundAction = new FoundAction(redirectPath);
- byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction);
- String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes));
- context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized));
- // This is to prevent previous login attempts from being cached.
- // We replicate the logic here, which is buried in the Pac4j client.
- if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX) != null) {
- _logger.debug("Found previous login attempt. Removing it manually to prevent unexpected errors.");
- _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, "");
- }
+ if (StringUtils.isBlank(resetToken)) {
+ JsonNode invalidCredsJson = Json.newObject().put("message", "Reset token must not be empty.");
+ return Results.badRequest(invalidCredsJson);
}
- private String encodeRedirectUri(final String redirectUri) {
- return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8);
+ final Urn userUrn = new CorpuserUrn(email);
+ final String userUrnString = userUrn.toString();
+ _authClient.resetNativeUserCredentials(userUrnString, password, resetToken);
+ final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId());
+ return createSession(userUrnString, accessToken);
+ }
+
+ private Optional<Result> redirectToIdentityProvider(
+ Http.RequestHeader request, String redirectPath) {
+ final PlayWebContext playWebContext = new PlayWebContext(request, _playSessionStore);
+ final Client client = _ssoManager.getSsoProvider().client();
+ configurePac4jSessionStore(playWebContext, client, redirectPath);
+ try {
+ final Optional<RedirectionAction> action = client.getRedirectionAction(playWebContext);
+ return action.map(act -> new PlayHttpActionAdapter().adapt(act, playWebContext));
+ } catch (Exception e) {
+ if (_verbose) {
+ _logger.error(
+ "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured",
+ e);
+ } else {
+ _logger.error(
+ "Caught exception while attempting to redirect to SSO identity provider! It's likely that SSO integration is mis-configured");
+ }
+ return Optional.of(
+ Results.redirect(
+ String.format(
+ "/login?error_msg=%s",
+ URLEncoder.encode(
+ "Failed to redirect to Single Sign-On provider. Please contact your DataHub Administrator, "
+ + "or refer to server logs for more information.",
+ StandardCharsets.UTF_8))));
}
-
- private boolean tryLogin(String username, String password) {
- boolean loginSucceeded = false;
-
- // First try jaas login, if enabled
- if (_jaasConfigs.isJAASEnabled()) {
- try {
- _logger.debug("Attempting jaas authentication");
- AuthenticationManager.authenticateJaasUser(username, password);
- _logger.debug("Jaas authentication successful. Login succeeded");
- loginSucceeded = true;
- } catch (Exception e) {
- if (_verbose) {
- _logger.debug("Jaas authentication error. Login failed", e);
- } else {
- _logger.debug("Jaas authentication error. Login failed");
- }
- }
- }
-
- // If jaas login fails or is disabled, try native auth login
- if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) {
- final Urn userUrn = new CorpuserUrn(username);
- final String userUrnString = userUrn.toString();
- loginSucceeded = loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password);
+ }
+
+ private void configurePac4jSessionStore(
+ PlayWebContext context, Client client, String redirectPath) {
+ // Set the originally requested path for post-auth redirection. We split off into a separate
+ // cookie from the session
+ // to reduce size of the session cookie
+ FoundAction foundAction = new FoundAction(redirectPath);
+ byte[] javaSerBytes = JAVA_SER_HELPER.serializeToBytes(foundAction);
+ String serialized = Base64.getEncoder().encodeToString(compressBytes(javaSerBytes));
+ context.addResponseCookie(new Cookie(REDIRECT_URL_COOKIE_NAME, serialized));
+ // This is to prevent previous login attempts from being cached.
+ // We replicate the logic here, which is buried in the Pac4j client.
+ if (_playSessionStore.get(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX)
+ != null) {
+ _logger.debug(
+ "Found previous login attempt. Removing it manually to prevent unexpected errors.");
+ _playSessionStore.set(context, client.getName() + ATTEMPTED_AUTHENTICATION_SUFFIX, "");
+ }
+ }
+
+ private String encodeRedirectUri(final String redirectUri) {
+ return URLEncoder.encode(redirectUri, StandardCharsets.UTF_8);
+ }
+
+ private boolean tryLogin(String username, String password) {
+ boolean loginSucceeded = false;
+
+ // First try jaas login, if enabled
+ if (_jaasConfigs.isJAASEnabled()) {
+ try {
+ _logger.debug("Attempting jaas authentication");
+ AuthenticationManager.authenticateJaasUser(username, password);
+ _logger.debug("Jaas authentication successful. Login succeeded");
+ loginSucceeded = true;
+ } catch (Exception e) {
+ if (_verbose) {
+ _logger.debug("Jaas authentication error. Login failed", e);
+ } else {
+ _logger.debug("Jaas authentication error. Login failed");
}
-
- return loginSucceeded;
+ }
}
- private Result createSession(String userUrnString, String accessToken) {
- return Results.ok().withSession(createSessionMap(userUrnString, accessToken))
- .withCookies(
- createActorCookie(
- userUrnString,
- _cookieConfigs.getTtlInHours(),
- _cookieConfigs.getAuthCookieSameSite(),
- _cookieConfigs.getAuthCookieSecure()
- )
- );
-
+ // If jaas login fails or is disabled, try native auth login
+ if (_nativeAuthenticationConfigs.isNativeAuthenticationEnabled() && !loginSucceeded) {
+ final Urn userUrn = new CorpuserUrn(username);
+ final String userUrnString = userUrn.toString();
+ loginSucceeded =
+ loginSucceeded || _authClient.verifyNativeUserCredentials(userUrnString, password);
}
-}
\ No newline at end of file
+
+ return loginSucceeded;
+ }
+
+ private Result createSession(String userUrnString, String accessToken) {
+ return Results.ok()
+ .withSession(createSessionMap(userUrnString, accessToken))
+ .withCookies(
+ createActorCookie(
+ userUrnString,
+ _cookieConfigs.getTtlInHours(),
+ _cookieConfigs.getAuthCookieSameSite(),
+ _cookieConfigs.getAuthCookieSecure()));
+ }
+}
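The reformatted `authenticate(...)` above preserves a strict fallback order. Below is a compact sketch of just that decision chain; the enum and method names are illustrative and not part of the patch.

```java
/** Illustrative only: the ordering of fallbacks in AuthenticationController.authenticate(). */
public final class AuthRouteSketch {

  enum Route { ALREADY_AUTHENTICATED, SSO_REDIRECT, LOGIN_PAGE, GUEST_SESSION }

  private AuthRouteSketch() {}

  static Route choose(
      boolean hasValidSessionCookie, boolean ssoEnabled, boolean jaasEnabled, boolean nativeEnabled) {
    if (hasValidSessionCookie) {
      return Route.ALREADY_AUTHENTICATED; // valid session cookie: just redirect back
    }
    if (ssoEnabled) {
      return Route.SSO_REDIRECT; // 1. redirect to the identity provider
    }
    if (jaasEnabled || nativeEnabled) {
      return Route.LOGIN_PAGE; // 2. username / password login
    }
    return Route.GUEST_SESSION; // 3. default actor with a generated session token
  }

  public static void main(String[] args) {
    System.out.println(choose(false, true, false, false));  // SSO_REDIRECT
    System.out.println(choose(false, false, true, false));  // LOGIN_PAGE
    System.out.println(choose(false, false, false, false)); // GUEST_SESSION
  }
}
```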
diff --git a/datahub-frontend/app/controllers/CentralLogoutController.java b/datahub-frontend/app/controllers/CentralLogoutController.java
index 5e24fe9f8220cf..eea1c662ebf894 100644
--- a/datahub-frontend/app/controllers/CentralLogoutController.java
+++ b/datahub-frontend/app/controllers/CentralLogoutController.java
@@ -2,18 +2,15 @@
import com.typesafe.config.Config;
import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
+import javax.inject.Inject;
import lombok.extern.slf4j.Slf4j;
import org.pac4j.play.LogoutController;
import play.mvc.Http;
import play.mvc.Result;
import play.mvc.Results;
-import javax.inject.Inject;
-import java.nio.charset.StandardCharsets;
-
-/**
- * Responsible for handling logout logic with oidc providers
- */
+/** Responsible for handling logout logic with oidc providers */
@Slf4j
public class CentralLogoutController extends LogoutController {
private static final String AUTH_URL_CONFIG_PATH = "/login";
@@ -28,26 +25,27 @@ public CentralLogoutController(Config config) {
setLogoutUrlPattern(DEFAULT_BASE_URL_PATH + ".*");
setLocalLogout(true);
setCentralLogout(true);
-
}
- /**
- * logout() method should not be called if oidc is not enabled
- */
+ /** logout() method should not be called if oidc is not enabled */
public Result executeLogout(Http.Request request) {
if (_isOidcEnabled) {
try {
return logout(request).toCompletableFuture().get().withNewSession();
} catch (Exception e) {
- log.error("Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.", e);
+ log.error(
+ "Caught exception while attempting to perform SSO logout! It's likely that SSO integration is mis-configured.",
+ e);
return redirect(
- String.format("/login?error_msg=%s",
- URLEncoder.encode("Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, "
- + "or refer to server logs for more information.", StandardCharsets.UTF_8)))
- .withNewSession();
+ String.format(
+ "/login?error_msg=%s",
+ URLEncoder.encode(
+ "Failed to sign out using Single Sign-On provider. Please contact your DataHub Administrator, "
+ + "or refer to server logs for more information.",
+ StandardCharsets.UTF_8)))
+ .withNewSession();
}
}
- return Results.redirect(AUTH_URL_CONFIG_PATH)
- .withNewSession();
+ return Results.redirect(AUTH_URL_CONFIG_PATH).withNewSession();
}
}
diff --git a/datahub-frontend/app/controllers/SsoCallbackController.java b/datahub-frontend/app/controllers/SsoCallbackController.java
index 7a4b5585cc21ab..9f4445b1aa5c7f 100644
--- a/datahub-frontend/app/controllers/SsoCallbackController.java
+++ b/datahub-frontend/app/controllers/SsoCallbackController.java
@@ -1,6 +1,9 @@
package controllers;
import auth.CookieConfigs;
+import auth.sso.SsoManager;
+import auth.sso.SsoProvider;
+import auth.sso.oidc.OidcCallbackLogic;
import client.AuthServiceClient;
import com.datahub.authentication.Authentication;
import com.linkedin.entity.client.SystemEntityClient;
@@ -18,17 +21,13 @@
import org.pac4j.play.PlayWebContext;
import play.mvc.Http;
import play.mvc.Result;
-import auth.sso.oidc.OidcCallbackLogic;
-import auth.sso.SsoManager;
-import auth.sso.SsoProvider;
import play.mvc.Results;
-
/**
* A dedicated Controller for handling redirects to DataHub by 3rd-party Identity Providers after
* off-platform authentication.
*
- * Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines
+ * <p>Handles a single "callback/{protocol}" route, where the protocol (ie. OIDC / SAML) determines
* the handling logic to invoke.
*/
@Slf4j
@@ -46,56 +45,88 @@ public SsoCallbackController(
_ssoManager = ssoManager;
setDefaultUrl("/"); // By default, redirects to Home Page on log in.
setSaveInSession(false);
- setCallbackLogic(new SsoCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, new CookieConfigs(configs)));
+ setCallbackLogic(
+ new SsoCallbackLogic(
+ ssoManager,
+ systemAuthentication,
+ entityClient,
+ authClient,
+ new CookieConfigs(configs)));
}
  public CompletionStage<Result> handleCallback(String protocol, Http.Request request) {
if (shouldHandleCallback(protocol)) {
log.debug(String.format("Handling SSO callback. Protocol: %s", protocol));
- return callback(request).handle((res, e) -> {
- if (e != null) {
- log.error("Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.", e);
- return Results.redirect(
- String.format("/login?error_msg=%s",
- URLEncoder.encode(
- "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.",
- StandardCharsets.UTF_8)))
- .discardingCookie("actor")
- .withNewSession();
- }
- return res;
- });
+ return callback(request)
+ .handle(
+ (res, e) -> {
+ if (e != null) {
+ log.error(
+ "Caught exception while attempting to handle SSO callback! It's likely that SSO integration is mis-configured.",
+ e);
+ return Results.redirect(
+ String.format(
+ "/login?error_msg=%s",
+ URLEncoder.encode(
+ "Failed to sign in using Single Sign-On provider. Please try again, or contact your DataHub Administrator.",
+ StandardCharsets.UTF_8)))
+ .discardingCookie("actor")
+ .withNewSession();
+ }
+ return res;
+ });
}
- return CompletableFuture.completedFuture(Results.internalServerError(
- String.format("Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol)));
+ return CompletableFuture.completedFuture(
+ Results.internalServerError(
+ String.format(
+ "Failed to perform SSO callback. SSO is not enabled for protocol: %s", protocol)));
}
-
- /**
- * Logic responsible for delegating to protocol-specific callback logic.
- */
+ /** Logic responsible for delegating to protocol-specific callback logic. */
  public class SsoCallbackLogic implements CallbackLogic<Result, PlayWebContext> {
private final OidcCallbackLogic _oidcCallbackLogic;
- SsoCallbackLogic(final SsoManager ssoManager, final Authentication systemAuthentication,
- final SystemEntityClient entityClient, final AuthServiceClient authClient, final CookieConfigs cookieConfigs) {
- _oidcCallbackLogic = new OidcCallbackLogic(ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs);
+ SsoCallbackLogic(
+ final SsoManager ssoManager,
+ final Authentication systemAuthentication,
+ final SystemEntityClient entityClient,
+ final AuthServiceClient authClient,
+ final CookieConfigs cookieConfigs) {
+ _oidcCallbackLogic =
+ new OidcCallbackLogic(
+ ssoManager, systemAuthentication, entityClient, authClient, cookieConfigs);
}
@Override
- public Result perform(PlayWebContext context, Config config,
- HttpActionAdapter httpActionAdapter, String defaultUrl, Boolean saveInSession,
- Boolean multiProfile, Boolean renewSession, String defaultClient) {
+ public Result perform(
+ PlayWebContext context,
+ Config config,
+ HttpActionAdapter<Result, PlayWebContext> httpActionAdapter,
+ String defaultUrl,
+ Boolean saveInSession,
+ Boolean multiProfile,
+ Boolean renewSession,
+ String defaultClient) {
if (SsoProvider.SsoProtocol.OIDC.equals(_ssoManager.getSsoProvider().protocol())) {
- return _oidcCallbackLogic.perform(context, config, httpActionAdapter, defaultUrl, saveInSession, multiProfile, renewSession, defaultClient);
+ return _oidcCallbackLogic.perform(
+ context,
+ config,
+ httpActionAdapter,
+ defaultUrl,
+ saveInSession,
+ multiProfile,
+ renewSession,
+ defaultClient);
}
// Should never occur.
- throw new UnsupportedOperationException("Failed to find matching SSO Provider. Only one supported is OIDC.");
+ throw new UnsupportedOperationException(
+ "Failed to find matching SSO Provider. Only one supported is OIDC.");
}
}
private boolean shouldHandleCallback(final String protocol) {
- return _ssoManager.isSsoEnabled() && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol);
+ return _ssoManager.isSsoEnabled()
+ && _ssoManager.getSsoProvider().protocol().getCommonName().equals(protocol);
}
}
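The gate in `shouldHandleCallback(...)` simply compares the `{protocol}` path segment against the configured provider's common name. A minimal sketch of that check follows; the `"oidc"` value is illustrative.

```java
import java.util.Optional;

/** Illustrative only: the protocol check applied before SSO callback logic runs. */
public final class CallbackGateSketch {

  private CallbackGateSketch() {}

  /** configuredProtocol is empty when SSO is disabled. */
  static boolean shouldHandle(Optional<String> configuredProtocol, String requestedProtocol) {
    return configuredProtocol.map(p -> p.equals(requestedProtocol)).orElse(false);
  }

  public static void main(String[] args) {
    System.out.println(shouldHandle(Optional.of("oidc"), "oidc")); // true -> run callback logic
    System.out.println(shouldHandle(Optional.of("oidc"), "saml")); // false -> 500 from controller
    System.out.println(shouldHandle(Optional.empty(), "oidc"));    // false -> SSO disabled
  }
}
```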
diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java
index 776ab5cad58ff0..254a8cc640d0c5 100644
--- a/datahub-frontend/app/controllers/TrackingController.java
+++ b/datahub-frontend/app/controllers/TrackingController.java
@@ -1,14 +1,15 @@
package controllers;
+import static auth.AuthUtils.ACTOR;
+
import auth.Authenticator;
import client.AuthServiceClient;
+import client.KafkaTrackingProducer;
import com.fasterxml.jackson.databind.JsonNode;
import com.typesafe.config.Config;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import javax.inject.Singleton;
-
-
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -16,57 +17,52 @@
import play.mvc.Http;
import play.mvc.Result;
import play.mvc.Security;
-import client.KafkaTrackingProducer;
-
-import static auth.AuthUtils.ACTOR;
-
// TODO: Migrate this to metadata-service.
@Singleton
public class TrackingController extends Controller {
- private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName());
+ private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName());
- private final String _topic;
+ private final String _topic;
- @Inject
- KafkaTrackingProducer _producer;
+ @Inject KafkaTrackingProducer _producer;
- @Inject
- AuthServiceClient _authClient;
+ @Inject AuthServiceClient _authClient;
- @Inject
- public TrackingController(@Nonnull Config config) {
- _topic = config.getString("analytics.tracking.topic");
- }
+ @Inject
+ public TrackingController(@Nonnull Config config) {
+ _topic = config.getString("analytics.tracking.topic");
+ }
- @Security.Authenticated(Authenticator.class)
- @Nonnull
- public Result track(Http.Request request) throws Exception {
- if (!_producer.isEnabled()) {
- // If tracking is disabled, simply return a 200.
- return status(200);
- }
+ @Security.Authenticated(Authenticator.class)
+ @Nonnull
+ public Result track(Http.Request request) throws Exception {
+ if (!_producer.isEnabled()) {
+ // If tracking is disabled, simply return a 200.
+ return status(200);
+ }
- JsonNode event;
- try {
- event = request.body().asJson();
- } catch (Exception e) {
- return badRequest();
- }
- final String actor = request.session().data().get(ACTOR);
- try {
- _logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event));
-      final ProducerRecord<String, String> record = new ProducerRecord<>(
- _topic,
- actor,
- event.toString());
- _producer.send(record);
- _authClient.track(event.toString());
- return ok();
- } catch (Exception e) {
- _logger.error(String.format("Failed to emit product analytics event. actor: %s, event: %s", actor, event));
- return internalServerError(e.getMessage());
- }
+ JsonNode event;
+ try {
+ event = request.body().asJson();
+ } catch (Exception e) {
+ return badRequest();
+ }
+ final String actor = request.session().data().get(ACTOR);
+ try {
+ _logger.debug(
+ String.format("Emitting product analytics event. actor: %s, event: %s", actor, event));
+      final ProducerRecord<String, String> record =
+ new ProducerRecord<>(_topic, actor, event.toString());
+ _producer.send(record);
+ _authClient.track(event.toString());
+ return ok();
+ } catch (Exception e) {
+ _logger.error(
+ String.format(
+ "Failed to emit product analytics event. actor: %s, event: %s", actor, event));
+ return internalServerError(e.getMessage());
}
+ }
}
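
For reference, the controller above keys each analytics record by the acting user, so one user's events stay ordered within a Kafka partition. A minimal sketch of that keying, assuming a hypothetical topic name and payload; only the ProducerRecord(topic, key, value) shape is taken from the controller itself.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.clients.producer.ProducerRecord;

public class TrackingRecordSketch {
  // Builds a record keyed by actor, mirroring TrackingController.track above.
  static ProducerRecord<String, String> toRecord(String topic, String actor, JsonNode event) {
    return new ProducerRecord<>(topic, actor, event.toString());
  }

  public static void main(String[] args) throws Exception {
    JsonNode event = new ObjectMapper().readTree("{\"type\":\"PageViewEvent\"}");
    // "AnalyticsTrackingEvents" is an illustrative topic name, not the configured one.
    System.out.println(toRecord("AnalyticsTrackingEvents", "urn:li:corpuser:jdoe", event));
  }
}
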
diff --git a/datahub-frontend/app/security/AuthUtil.java b/datahub-frontend/app/security/AuthUtil.java
index 8af90b37a6f31b..55752644ada706 100644
--- a/datahub-frontend/app/security/AuthUtil.java
+++ b/datahub-frontend/app/security/AuthUtil.java
@@ -8,52 +8,53 @@
import javax.crypto.spec.SecretKeySpec;
import org.apache.commons.codec.digest.HmacAlgorithms;
-
-/**
- * Auth Utils
- * Adheres to HSEC requirement for creating application tokens
- */
+/** Auth Utils. Adheres to HSEC requirement for creating application tokens. */
public final class AuthUtil {
private static final String HMAC_SHA256_ALGORITHM = HmacAlgorithms.HMAC_SHA_256.toString();
private static final String DELIIMITER = ":";
private static final String HEX_CHARS = "0123456789ABCDEF";
- private AuthUtil() { }
+ private AuthUtil() {}
/**
* Generate hash string using the secret HMAC Key
+ *
* @param value value to be hashed
* @param hmacKey secret HMAC key
* @return Hashed string using the secret key
* @throws NoSuchAlgorithmException
* @throws InvalidKeyException
*/
- public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlgorithmException, InvalidKeyException {
- //Time-stamp at Encryption time
+ public static String generateHash(String value, byte[] hmacKey)
+ throws NoSuchAlgorithmException, InvalidKeyException {
+ // Time-stamp at Encryption time
long tStamp = System.currentTimeMillis();
String uTValue = new String();
String cValue;
String finalEncValue;
- //Concatenated Values
+ // Concatenated Values
uTValue = uTValue.concat(value).concat(":").concat(Long.toString(tStamp));
cValue = uTValue;
- //Digest - HMAC-SHA256
+ // Digest - HMAC-SHA256
SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM);
Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM);
mac.init(signingKey);
byte[] rawHmac = mac.doFinal(uTValue.getBytes());
String hmacString = getHex(rawHmac);
- finalEncValue = Base64.getEncoder().encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes()));
+ finalEncValue =
+ Base64.getEncoder()
+ .encodeToString((cValue.concat(DELIIMITER).concat(hmacString).getBytes()));
return finalEncValue;
}
/**
* Validate the one-way hash string
+ *
* @param hashedValue Hashed value to be validated
* @param hmacKey HMAC Key used to create the hash
* @param sessionWindow previously defined session window to validate if the hash is expired
@@ -62,7 +63,7 @@ public static String generateHash(String value, byte[] hmacKey) throws NoSuchAlg
*/
public static String verifyHash(String hashedValue, byte[] hmacKey, long sessionWindow)
throws GeneralSecurityException {
- //Username:Timestamp:SignedHMAC(Username:Timestamp)
+ // Username:Timestamp:SignedHMAC(Username:Timestamp)
String[] decryptedHash = decryptBase64Hash(hashedValue);
String username = decryptedHash[0];
String timestamp = decryptedHash[1];
@@ -70,7 +71,7 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session
long newTStamp = System.currentTimeMillis();
String newUTValue = username.concat(DELIIMITER).concat(timestamp);
- //Digest - HMAC-SHA1 Verify
+    // Digest - HMAC-SHA256 Verify
SecretKeySpec signingKey = new SecretKeySpec(hmacKey, HMAC_SHA256_ALGORITHM);
Mac mac = Mac.getInstance(HMAC_SHA256_ALGORITHM);
mac.init(signingKey);
@@ -87,8 +88,10 @@ public static String verifyHash(String hashedValue, byte[] hmacKey, long session
return decryptedHash[0];
}
+
/**
* Decrypt base64 hash
+ *
* @param value base 64 hash string
* @return Decrypted base 64 string
*/
@@ -96,8 +99,10 @@ private static String[] decryptBase64Hash(String value) {
String decodedBase64 = new String(Base64.getDecoder().decode(value));
return decodedBase64.split(DELIIMITER);
}
+
/**
* Get Hex string from byte array
+ *
* @param raw byte array
* @return Hex representation of the byte array
*/
@@ -114,14 +119,16 @@ private static String getHex(byte[] raw) {
return hex.toString();
}
+
/**
* Compares two HMAC byte arrays
+ *
* @param a HMAC byte array 1
* @param b HMAC byte array 2
* @return true if the two HMAC are identical
*/
private static boolean isEqual(byte[] a, byte[] b) {
- if (a == null || b == null || a.length != b.length) {
+ if (a == null || b == null || a.length != b.length) {
return false;
}
@@ -133,4 +140,4 @@ private static boolean isEqual(byte[] a, byte[] b) {
return result == 0;
}
-}
\ No newline at end of file
+}
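
A usage sketch for the two methods above: generate a token for a username, then verify it inside a session window. The secret and the one-hour window are illustrative values; the token shape Base64(username:timestamp:HMAC) follows the code shown in the diff.

import java.nio.charset.StandardCharsets;
import security.AuthUtil;

public class AuthUtilUsageSketch {
  public static void main(String[] args) throws Exception {
    byte[] hmacKey = "change-me".getBytes(StandardCharsets.UTF_8); // illustrative secret
    String token = AuthUtil.generateHash("jdoe", hmacKey);
    // verifyHash recomputes the HMAC and checks the embedded timestamp against the window;
    // it throws GeneralSecurityException on tampering or expiry.
    String username = AuthUtil.verifyHash(token, hmacKey, 60L * 60L * 1000L);
    System.out.println(username); // prints "jdoe" when the token is valid
  }
}
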
diff --git a/datahub-frontend/app/security/AuthenticationManager.java b/datahub-frontend/app/security/AuthenticationManager.java
index 67bcf7e404335f..f46dc57c232bd2 100644
--- a/datahub-frontend/app/security/AuthenticationManager.java
+++ b/datahub-frontend/app/security/AuthenticationManager.java
@@ -15,13 +15,12 @@
import org.eclipse.jetty.jaas.PropertyUserStoreManager;
import play.Logger;
-
public class AuthenticationManager {
- private AuthenticationManager(boolean verbose) {
- }
+ private AuthenticationManager(boolean verbose) {}
- public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password) throws Exception {
+ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull String password)
+ throws Exception {
Preconditions.checkArgument(!StringUtils.isAnyEmpty(userName), "Username cannot be empty");
JAASLoginService jaasLoginService = new JAASLoginService("WHZ-Authentication");
PropertyUserStoreManager propertyUserStoreManager = new PropertyUserStoreManager();
@@ -29,10 +28,12 @@ public static void authenticateJaasUser(@Nonnull String userName, @Nonnull Strin
jaasLoginService.setBeans(Collections.singletonList(propertyUserStoreManager));
JAASLoginService.INSTANCE.set(jaasLoginService);
try {
- LoginContext lc = new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password));
+ LoginContext lc =
+ new LoginContext("WHZ-Authentication", new WHZCallbackHandler(userName, password));
lc.login();
} catch (LoginException le) {
- AuthenticationException authenticationException = new AuthenticationException(le.getMessage());
+ AuthenticationException authenticationException =
+ new AuthenticationException(le.getMessage());
authenticationException.setRootCause(le);
throw authenticationException;
}
@@ -52,7 +53,8 @@ public void handle(@Nonnull Callback[] callbacks) {
NameCallback nc = null;
PasswordCallback pc = null;
for (Callback callback : callbacks) {
- Logger.debug("The submitted callback is of type: " + callback.getClass() + " : " + callback);
+ Logger.debug(
+ "The submitted callback is of type: " + callback.getClass() + " : " + callback);
if (callback instanceof NameCallback) {
nc = (NameCallback) callback;
nc.setName(this.username);
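
The WHZCallbackHandler referenced above follows the standard JAAS pattern: the login module asks for a name and a password through callbacks. A minimal, self-contained sketch of such a handler; the class and field names here are illustrative, not the project's own.

import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;

public class SimpleCallbackHandler implements CallbackHandler {
  private final String username;
  private final String password;

  public SimpleCallbackHandler(String username, String password) {
    this.username = username;
    this.password = password;
  }

  @Override
  public void handle(Callback[] callbacks) {
    for (Callback callback : callbacks) {
      if (callback instanceof NameCallback) {
        ((NameCallback) callback).setName(username);
      } else if (callback instanceof PasswordCallback) {
        ((PasswordCallback) callback).setPassword(password.toCharArray());
      }
    }
  }
}
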
diff --git a/datahub-frontend/app/security/DummyLoginModule.java b/datahub-frontend/app/security/DummyLoginModule.java
index 56822f0805be41..c46fa29e1599ad 100644
--- a/datahub-frontend/app/security/DummyLoginModule.java
+++ b/datahub-frontend/app/security/DummyLoginModule.java
@@ -1,21 +1,22 @@
package security;
+import java.util.Map;
import javax.security.auth.Subject;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.login.LoginException;
import javax.security.auth.spi.LoginModule;
-import java.util.Map;
-
/**
- * This LoginModule performs dummy authentication.
- * Any username and password can work for authentication
+ * This LoginModule performs dummy authentication. Any username and password can work for
+ * authentication
*/
public class DummyLoginModule implements LoginModule {
- public void initialize(final Subject subject, final CallbackHandler callbackHandler,
- final Map sharedState, final Map options) {
- }
+ public void initialize(
+ final Subject subject,
+ final CallbackHandler callbackHandler,
+ final Map sharedState,
+ final Map options) {}
public boolean login() throws LoginException {
return true;
@@ -32,5 +33,4 @@ public boolean abort() throws LoginException {
public boolean logout() throws LoginException {
return true;
}
-
-}
\ No newline at end of file
+}
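
One way to exercise DummyLoginModule without a jaas.conf file is to register it through a programmatic javax.security.auth.login.Configuration. A sketch under that assumption: the realm name matches the "WHZ-Authentication" entry used above, everything else is illustrative.

import java.util.Map;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;

public class DummyJaasSketch {
  public static void main(String[] args) throws Exception {
    Configuration.setConfiguration(
        new Configuration() {
          @Override
          public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
            return new AppConfigurationEntry[] {
              new AppConfigurationEntry(
                  "security.DummyLoginModule", LoginModuleControlFlag.REQUIRED, Map.of())
            };
          }
        });
    // DummyLoginModule accepts any credentials, so this login is expected to succeed.
    new LoginContext("WHZ-Authentication").login();
  }
}
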
diff --git a/datahub-frontend/app/utils/ConfigUtil.java b/datahub-frontend/app/utils/ConfigUtil.java
index b99a5e123b9eb9..5c80389c96da49 100644
--- a/datahub-frontend/app/utils/ConfigUtil.java
+++ b/datahub-frontend/app/utils/ConfigUtil.java
@@ -3,18 +3,16 @@
import com.linkedin.util.Configuration;
import com.typesafe.config.Config;
-
public class ConfigUtil {
- private ConfigUtil() {
-
- }
+ private ConfigUtil() {}
// New configurations, provided via application.conf file.
public static final String METADATA_SERVICE_HOST_CONFIG_PATH = "metadataService.host";
public static final String METADATA_SERVICE_PORT_CONFIG_PATH = "metadataService.port";
public static final String METADATA_SERVICE_USE_SSL_CONFIG_PATH = "metadataService.useSsl";
- public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH = "metadataService.sslProtocol";
+ public static final String METADATA_SERVICE_SSL_PROTOCOL_CONFIG_PATH =
+ "metadataService.sslProtocol";
// Legacy env-var based config values, for backwards compatibility:
public static final String GMS_HOST_ENV_VAR = "DATAHUB_GMS_HOST";
@@ -27,10 +25,14 @@ private ConfigUtil() {
public static final String DEFAULT_GMS_PORT = "8080";
public static final String DEFAULT_GMS_USE_SSL = "False";
- public static final String DEFAULT_METADATA_SERVICE_HOST = Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost");
- public static final Integer DEFAULT_METADATA_SERVICE_PORT = Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080"));
- public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL = Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False"));
- public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL = Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR);
+ public static final String DEFAULT_METADATA_SERVICE_HOST =
+ Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, "localhost");
+ public static final Integer DEFAULT_METADATA_SERVICE_PORT =
+ Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, "8080"));
+ public static final Boolean DEFAULT_METADATA_SERVICE_USE_SSL =
+ Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, "False"));
+ public static final String DEFAULT_METADATA_SERVICE_SSL_PROTOCOL =
+ Configuration.getEnvironmentVariable(GMS_SSL_PROTOCOL_VAR);
public static boolean getBoolean(Config config, String key) {
return config.hasPath(key) && config.getBoolean(key);
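
The getBoolean helper above guards the lookup with hasPath, so a missing key yields false instead of a ConfigException.Missing. A small sketch using an in-memory Typesafe Config; the key names are illustrative.

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;
import utils.ConfigUtil;

public class ConfigUtilSketch {
  public static void main(String[] args) {
    Config config = ConfigFactory.parseString("metadataService.useSsl = true");
    System.out.println(ConfigUtil.getBoolean(config, "metadataService.useSsl"));   // true
    System.out.println(ConfigUtil.getBoolean(config, "metadataService.notThere")); // false, no throw
  }
}
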
diff --git a/datahub-frontend/app/utils/SearchUtil.java b/datahub-frontend/app/utils/SearchUtil.java
index 2c52ff5b40156c..803c70a63646a0 100644
--- a/datahub-frontend/app/utils/SearchUtil.java
+++ b/datahub-frontend/app/utils/SearchUtil.java
@@ -2,29 +2,26 @@
import javax.annotation.Nonnull;
-
-/**
- * Utility functions for Search
- */
+/** Utility functions for Search */
public class SearchUtil {
- private SearchUtil() {
- //utility class
- }
+ private SearchUtil() {
+ // utility class
+ }
- /**
- * Returns the string with the forward slash escaped
- * More details on reserved characters in Elasticsearch can be found at,
- * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters
- *
- * @param input
- * @return
- */
- @Nonnull
- public static String escapeForwardSlash(@Nonnull String input) {
- if (input.contains("/")) {
- input = input.replace("/", "\\\\/");
- }
- return input;
+ /**
+ * Returns the string with the forward slash escaped More details on reserved characters in
+ * Elasticsearch can be found at,
+ * https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-query-string-query.html#_reserved_characters
+ *
+ * @param input
+ * @return
+ */
+ @Nonnull
+ public static String escapeForwardSlash(@Nonnull String input) {
+ if (input.contains("/")) {
+ input = input.replace("/", "\\\\/");
}
+ return input;
+ }
}
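
To make the escaping concrete: each '/' becomes a doubled backslash plus slash in the Java string, so Elasticsearch ultimately sees an escaped reserved character rather than a path separator. A small sketch with illustrative inputs.

import utils.SearchUtil;

public class SearchEscapeSketch {
  public static void main(String[] args) {
    // Prints \\/prod\\/kafka — two backslashes per slash in the emitted query string.
    System.out.println(SearchUtil.escapeForwardSlash("/prod/kafka"));
    // Strings without '/' pass through unchanged.
    System.out.println(SearchUtil.escapeForwardSlash("no_slashes_here"));
  }
}
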
diff --git a/datahub-frontend/build.gradle b/datahub-frontend/build.gradle
index 9a5fb3210a3115..437c72e6394ea6 100644
--- a/datahub-frontend/build.gradle
+++ b/datahub-frontend/build.gradle
@@ -2,6 +2,7 @@ plugins {
id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
id 'scala'
id 'com.palantir.docker'
+ id 'org.gradle.playframework'
}
apply from: "../gradle/versioning/versioning.gradle"
@@ -20,7 +21,6 @@ model {
}
task myTar(type: Tar) {
- extension = "tgz"
compression = Compression.GZIP
from("${buildDir}/stage")
@@ -55,8 +55,6 @@ tasks.withType(Checkstyle) {
exclude "**/generated/**"
}
-checkstyleMain.source = "app/"
-
/*
PLAY UPGRADE NOTE
@@ -121,3 +119,23 @@ task cleanLocalDockerImages {
}
}
dockerClean.finalizedBy(cleanLocalDockerImages)
+
+// gradle 8 fixes
+tasks.getByName('createDatahub-frontendTarDist').dependsOn 'stageMainDist'
+tasks.getByName('createDatahub-frontendZipDist').dependsOn 'stageMainDist'
+stagePlayBinaryDist.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistTar.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+playBinaryDistZip.dependsOn tasks.getByName('createDatahub-frontendStartScripts')
+tasks.getByName('stageDatahub-frontendDist').dependsOn stagePlayBinaryDist
+tasks.getByName('stageDatahub-frontendDist').dependsOn createPlayBinaryStartScripts
+tasks.getByName('datahub-frontendDistTar').dependsOn createPlayBinaryStartScripts
+tasks.getByName('datahub-frontendDistTar').dependsOn createMainStartScripts
+tasks.getByName('datahub-frontendDistZip').dependsOn createPlayBinaryStartScripts
+tasks.getByName('datahub-frontendDistZip').dependsOn createMainStartScripts
+playBinaryDistTar.dependsOn createMainStartScripts
+playBinaryDistZip.dependsOn createMainStartScripts
+createMainStartScripts.dependsOn 'stageDatahub-frontendDist'
+createPlayBinaryTarDist.dependsOn 'stageDatahub-frontendDist'
+createPlayBinaryZipDist.dependsOn 'stageDatahub-frontendDist'
+createPlayBinaryTarDist.dependsOn 'stageMainDist'
+createPlayBinaryZipDist.dependsOn 'stageMainDist'
diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle
index dd1ceee411f746..84fb4c02620b8e 100644
--- a/datahub-frontend/play.gradle
+++ b/datahub-frontend/play.gradle
@@ -1,4 +1,3 @@
-apply plugin: "org.gradle.playframework"
// Change this to listen on a different port
project.ext.httpPort = 9001
@@ -101,4 +100,22 @@ play {
test {
useJUnitPlatform()
+
+ def playJava17CompatibleJvmArgs = [
+ "--add-opens=java.base/java.lang=ALL-UNNAMED",
+ //"--add-opens=java.base/java.lang.invoke=ALL-UNNAMED",
+ //"--add-opens=java.base/java.lang.reflect=ALL-UNNAMED",
+ //"--add-opens=java.base/java.io=ALL-UNNAMED",
+ //"--add-opens=java.base/java.net=ALL-UNNAMED",
+ //"--add-opens=java.base/java.nio=ALL-UNNAMED",
+ "--add-opens=java.base/java.util=ALL-UNNAMED",
+ //"--add-opens=java.base/java.util.concurrent=ALL-UNNAMED",
+ //"--add-opens=java.base/java.util.concurrent.atomic=ALL-UNNAMED",
+ //"--add-opens=java.base/sun.nio.ch=ALL-UNNAMED",
+ //"--add-opens=java.base/sun.nio.cs=ALL-UNNAMED",
+ //"--add-opens=java.base/sun.security.action=ALL-UNNAMED",
+ //"--add-opens=java.base/sun.util.calendar=ALL-UNNAMED",
+ //"--add-opens=java.security.jgss/sun.security.krb5=ALL-UNNAMED",
+ ]
+ jvmArgs = playJava17CompatibleJvmArgs
}
diff --git a/datahub-frontend/test/app/ApplicationTest.java b/datahub-frontend/test/app/ApplicationTest.java
index f27fefdb796691..a5da0951d16328 100644
--- a/datahub-frontend/test/app/ApplicationTest.java
+++ b/datahub-frontend/test/app/ApplicationTest.java
@@ -1,11 +1,22 @@
package app;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static play.mvc.Http.Status.NOT_FOUND;
+import static play.mvc.Http.Status.OK;
+import static play.test.Helpers.fakeRequest;
+import static play.test.Helpers.route;
+
import com.nimbusds.jwt.JWT;
import com.nimbusds.jwt.JWTClaimsSet;
import com.nimbusds.jwt.JWTParser;
import controllers.routes;
+import java.io.IOException;
+import java.net.InetAddress;
import java.text.ParseException;
import java.util.Date;
+import java.util.List;
+import java.util.Map;
import no.nav.security.mock.oauth2.MockOAuth2Server;
import no.nav.security.mock.oauth2.token.DefaultOAuth2TokenCallback;
import okhttp3.mockwebserver.MockResponse;
@@ -26,22 +37,9 @@
import play.mvc.Http;
import play.mvc.Result;
import play.test.Helpers;
-
import play.test.TestBrowser;
import play.test.WithBrowser;
-import java.io.IOException;
-import java.net.InetAddress;
-import java.util.List;
-import java.util.Map;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertTrue;
-import static play.mvc.Http.Status.NOT_FOUND;
-import static play.mvc.Http.Status.OK;
-import static play.test.Helpers.fakeRequest;
-import static play.test.Helpers.route;
-
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@SetEnvironmentVariable(key = "DATAHUB_SECRET", value = "test")
@SetEnvironmentVariable(key = "KAFKA_BOOTSTRAP_SERVER", value = "")
@@ -56,11 +54,15 @@ public class ApplicationTest extends WithBrowser {
@Override
protected Application provideApplication() {
return new GuiceApplicationBuilder()
- .configure("metadataService.port", String.valueOf(gmsServerPort()))
- .configure("auth.baseUrl", "http://localhost:" + providePort())
- .configure("auth.oidc.discoveryUri", "http://localhost:" + oauthServerPort()
- + "/testIssuer/.well-known/openid-configuration")
- .in(new Environment(Mode.TEST)).build();
+ .configure("metadataService.port", String.valueOf(gmsServerPort()))
+ .configure("auth.baseUrl", "http://localhost:" + providePort())
+ .configure(
+ "auth.oidc.discoveryUri",
+ "http://localhost:"
+ + oauthServerPort()
+ + "/testIssuer/.well-known/openid-configuration")
+ .in(new Environment(Mode.TEST))
+ .build();
}
@Override
@@ -90,16 +92,20 @@ public int gmsServerPort() {
public void init() throws IOException {
_gmsServer = new MockWebServer();
_gmsServer.enqueue(new MockResponse().setBody(String.format("{\"value\":\"%s\"}", TEST_USER)));
- _gmsServer.enqueue(new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN)));
+ _gmsServer.enqueue(
+ new MockResponse().setBody(String.format("{\"accessToken\":\"%s\"}", TEST_TOKEN)));
_gmsServer.start(gmsServerPort());
_oauthServer = new MockOAuth2Server();
_oauthServer.enqueueCallback(
- new DefaultOAuth2TokenCallback(ISSUER_ID, "testUser", List.of(), Map.of(
- "email", "testUser@myCompany.com",
- "groups", "myGroup"
- ), 600)
- );
+ new DefaultOAuth2TokenCallback(
+ ISSUER_ID,
+ "testUser",
+ List.of(),
+ Map.of(
+ "email", "testUser@myCompany.com",
+ "groups", "myGroup"),
+ 600));
_oauthServer.start(InetAddress.getByName("localhost"), oauthServerPort());
// Discovery url to authorization server metadata
@@ -147,8 +153,9 @@ public void testIndexNotFound() {
@Test
public void testOpenIdConfig() {
- assertEquals("http://localhost:" + oauthServerPort()
- + "/testIssuer/.well-known/openid-configuration", _wellKnownUrl);
+ assertEquals(
+ "http://localhost:" + oauthServerPort() + "/testIssuer/.well-known/openid-configuration",
+ _wellKnownUrl);
}
@Test
@@ -166,8 +173,13 @@ public void testHappyPathOidc() throws ParseException {
Map data = (Map) claims.getClaim("data");
assertEquals(TEST_TOKEN, data.get("token"));
assertEquals(TEST_USER, data.get("actor"));
- // Default expiration is 24h, so should always be less than current time + 1 day since it stamps the time before this executes
- assertTrue(claims.getExpirationTime().compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000))) < 0);
+ // Default expiration is 24h, so should always be less than current time + 1 day since it stamps
+ // the time before this executes
+ assertTrue(
+ claims
+ .getExpirationTime()
+ .compareTo(new Date(System.currentTimeMillis() + (24 * 60 * 60 * 1000)))
+ < 0);
}
@Test
diff --git a/datahub-frontend/test/security/DummyLoginModuleTest.java b/datahub-frontend/test/security/DummyLoginModuleTest.java
index 6727513d884af1..9bf2b5dd4d11c0 100644
--- a/datahub-frontend/test/security/DummyLoginModuleTest.java
+++ b/datahub-frontend/test/security/DummyLoginModuleTest.java
@@ -1,14 +1,12 @@
package security;
-import com.sun.security.auth.callback.TextCallbackHandler;
-import org.junit.jupiter.api.Test;
+import static org.junit.jupiter.api.Assertions.*;
+import com.sun.security.auth.callback.TextCallbackHandler;
import java.util.HashMap;
import javax.security.auth.Subject;
import javax.security.auth.login.LoginException;
-
-import static org.junit.jupiter.api.Assertions.*;
-
+import org.junit.jupiter.api.Test;
public class DummyLoginModuleTest {
diff --git a/datahub-frontend/test/security/OidcConfigurationTest.java b/datahub-frontend/test/security/OidcConfigurationTest.java
index ed16014b58e595..a27a1462a8a277 100644
--- a/datahub-frontend/test/security/OidcConfigurationTest.java
+++ b/datahub-frontend/test/security/OidcConfigurationTest.java
@@ -1,5 +1,8 @@
package security;
+import static auth.sso.oidc.OidcConfigs.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
import auth.sso.oidc.OidcConfigs;
import auth.sso.oidc.OidcProvider;
import com.typesafe.config.Config;
@@ -19,296 +22,290 @@
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
-
import org.junit.jupiter.api.Test;
import org.pac4j.oidc.client.OidcClient;
-import static auth.sso.oidc.OidcConfigs.*;
-import static org.junit.jupiter.api.Assertions.assertEquals;
-
-
public class OidcConfigurationTest {
- private static final com.typesafe.config.Config CONFIG = new Config() {
-
-    private final Map<String, Object> _map = new HashMap<>();
-
- @Override
- public ConfigObject root() {
- return null;
- }
-
- @Override
- public ConfigOrigin origin() {
- return null;
- }
-
- @Override
- public Config withFallback(ConfigMergeable other) {
- return null;
- }
-
- @Override
- public Config resolve() {
- return null;
- }
-
- @Override
- public Config resolve(ConfigResolveOptions options) {
- return null;
- }
-
- @Override
- public boolean isResolved() {
- return false;
- }
-
- @Override
- public Config resolveWith(Config source) {
- return null;
- }
-
- @Override
- public Config resolveWith(Config source, ConfigResolveOptions options) {
- return null;
- }
-
- @Override
- public void checkValid(Config reference, String... restrictToPaths) {
-
- }
-
- @Override
- public boolean hasPath(String path) {
- return true;
- }
-
- @Override
- public boolean hasPathOrNull(String path) {
- return false;
- }
-
- @Override
- public boolean isEmpty() {
- return false;
- }
-
- @Override
-    public Set<Map.Entry<String, ConfigValue>> entrySet() {
- return null;
- }
-
- @Override
- public boolean getIsNull(String path) {
- return false;
- }
-
- @Override
- public boolean getBoolean(String path) {
- return false;
- }
-
- @Override
- public Number getNumber(String path) {
- return null;
- }
-
- @Override
- public int getInt(String path) {
- return 0;
- }
-
- @Override
- public long getLong(String path) {
- return 0;
- }
-
- @Override
- public double getDouble(String path) {
- return 0;
- }
-
- @Override
- public String getString(String path) {
- return (String) _map.getOrDefault(path, "1");
- }
-
- @Override
-    public <T extends Enum<T>> T getEnum(Class<T> enumClass, String path) {
- return null;
- }
-
- @Override
- public ConfigObject getObject(String path) {
- return null;
- }
-
- @Override
- public Config getConfig(String path) {
- return null;
- }
-
- @Override
- public Object getAnyRef(String path) {
- return null;
- }
-
- @Override
- public ConfigValue getValue(String path) {
- return null;
- }
-
- @Override
- public Long getBytes(String path) {
- return null;
- }
-
- @Override
- public ConfigMemorySize getMemorySize(String path) {
- return null;
- }
-
- @Override
- public Long getMilliseconds(String path) {
- return null;
- }
-
- @Override
- public Long getNanoseconds(String path) {
- return null;
- }
-
- @Override
- public long getDuration(String path, TimeUnit unit) {
- return 0;
- }
-
- @Override
- public Duration getDuration(String path) {
- return null;
- }
-
- @Override
- public Period getPeriod(String path) {
- return null;
- }
-
- @Override
- public TemporalAmount getTemporal(String path) {
- return null;
- }
-
- @Override
- public ConfigList getList(String path) {
- return null;
- }
-
- @Override
-    public List<Boolean> getBooleanList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Number> getNumberList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Integer> getIntList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Long> getLongList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Double> getDoubleList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<String> getStringList(String path) {
-      return null;
-    }
-
-    @Override
-    public <T extends Enum<T>> List<T> getEnumList(Class<T> enumClass, String path) {
-      return null;
-    }
-
-    @Override
-    public List<? extends ConfigObject> getObjectList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<? extends Config> getConfigList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<? extends Object> getAnyRefList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Long> getBytesList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<ConfigMemorySize> getMemorySizeList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Long> getMillisecondsList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Long> getNanosecondsList(String path) {
-      return null;
-    }
-
-    @Override
-    public List<Long> getDurationList(String path, TimeUnit unit) {
-      return null;
-    }
-
-    @Override
-    public List<Duration> getDurationList(String path) {
-      return null;
-    }
-
- @Override
- public Config withOnlyPath(String path) {
- return null;
- }
-
- @Override
- public Config withoutPath(String path) {
- return null;
- }
-
- @Override
- public Config atPath(String path) {
- return null;
- }
-
- @Override
- public Config atKey(String key) {
- return null;
- }
-
- @Override
- public Config withValue(String path, ConfigValue value) {
- _map.put(path, value.unwrapped());
- return this;
- }
- };
+ private static final com.typesafe.config.Config CONFIG =
+ new Config() {
+
+        private final Map<String, Object> _map = new HashMap<>();
+
+ @Override
+ public ConfigObject root() {
+ return null;
+ }
+
+ @Override
+ public ConfigOrigin origin() {
+ return null;
+ }
+
+ @Override
+ public Config withFallback(ConfigMergeable other) {
+ return null;
+ }
+
+ @Override
+ public Config resolve() {
+ return null;
+ }
+
+ @Override
+ public Config resolve(ConfigResolveOptions options) {
+ return null;
+ }
+
+ @Override
+ public boolean isResolved() {
+ return false;
+ }
+
+ @Override
+ public Config resolveWith(Config source) {
+ return null;
+ }
+
+ @Override
+ public Config resolveWith(Config source, ConfigResolveOptions options) {
+ return null;
+ }
+
+ @Override
+ public void checkValid(Config reference, String... restrictToPaths) {}
+
+ @Override
+ public boolean hasPath(String path) {
+ return true;
+ }
+
+ @Override
+ public boolean hasPathOrNull(String path) {
+ return false;
+ }
+
+ @Override
+ public boolean isEmpty() {
+ return false;
+ }
+
+ @Override
+        public Set<Map.Entry<String, ConfigValue>> entrySet() {
+ return null;
+ }
+
+ @Override
+ public boolean getIsNull(String path) {
+ return false;
+ }
+
+ @Override
+ public boolean getBoolean(String path) {
+ return false;
+ }
+
+ @Override
+ public Number getNumber(String path) {
+ return null;
+ }
+
+ @Override
+ public int getInt(String path) {
+ return 0;
+ }
+
+ @Override
+ public long getLong(String path) {
+ return 0;
+ }
+
+ @Override
+ public double getDouble(String path) {
+ return 0;
+ }
+
+ @Override
+ public String getString(String path) {
+ return (String) _map.getOrDefault(path, "1");
+ }
+
+ @Override
+        public <T extends Enum<T>> T getEnum(Class<T> enumClass, String path) {
+ return null;
+ }
+
+ @Override
+ public ConfigObject getObject(String path) {
+ return null;
+ }
+
+ @Override
+ public Config getConfig(String path) {
+ return null;
+ }
+
+ @Override
+ public Object getAnyRef(String path) {
+ return null;
+ }
+
+ @Override
+ public ConfigValue getValue(String path) {
+ return null;
+ }
+
+ @Override
+ public Long getBytes(String path) {
+ return null;
+ }
+
+ @Override
+ public ConfigMemorySize getMemorySize(String path) {
+ return null;
+ }
+
+ @Override
+ public Long getMilliseconds(String path) {
+ return null;
+ }
+
+ @Override
+ public Long getNanoseconds(String path) {
+ return null;
+ }
+
+ @Override
+ public long getDuration(String path, TimeUnit unit) {
+ return 0;
+ }
+
+ @Override
+ public Duration getDuration(String path) {
+ return null;
+ }
+
+ @Override
+ public Period getPeriod(String path) {
+ return null;
+ }
+
+ @Override
+ public TemporalAmount getTemporal(String path) {
+ return null;
+ }
+
+ @Override
+ public ConfigList getList(String path) {
+ return null;
+ }
+
+ @Override
+        public List<Boolean> getBooleanList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Number> getNumberList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Integer> getIntList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Long> getLongList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Double> getDoubleList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<String> getStringList(String path) {
+          return null;
+        }
+
+        @Override
+        public <T extends Enum<T>> List<T> getEnumList(Class<T> enumClass, String path) {
+          return null;
+        }
+
+        @Override
+        public List<? extends ConfigObject> getObjectList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<? extends Config> getConfigList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<? extends Object> getAnyRefList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Long> getBytesList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<ConfigMemorySize> getMemorySizeList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Long> getMillisecondsList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Long> getNanosecondsList(String path) {
+          return null;
+        }
+
+        @Override
+        public List<Long> getDurationList(String path, TimeUnit unit) {
+          return null;
+        }
+
+        @Override
+        public List<Duration> getDurationList(String path) {
+          return null;
+        }
+
+ @Override
+ public Config withOnlyPath(String path) {
+ return null;
+ }
+
+ @Override
+ public Config withoutPath(String path) {
+ return null;
+ }
+
+ @Override
+ public Config atPath(String path) {
+ return null;
+ }
+
+ @Override
+ public Config atKey(String key) {
+ return null;
+ }
+
+ @Override
+ public Config withValue(String path, ConfigValue value) {
+ _map.put(path, value.unwrapped());
+ return this;
+ }
+ };
@Test
public void readTimeoutPropagation() {
diff --git a/datahub-frontend/test/utils/SearchUtilTest.java b/datahub-frontend/test/utils/SearchUtilTest.java
index 428566ae3f4247..6767fa56374692 100644
--- a/datahub-frontend/test/utils/SearchUtilTest.java
+++ b/datahub-frontend/test/utils/SearchUtilTest.java
@@ -1,17 +1,18 @@
package utils;
-import org.junit.jupiter.api.Test;
-
import static org.junit.jupiter.api.Assertions.assertEquals;
+import org.junit.jupiter.api.Test;
+
public class SearchUtilTest {
- @Test
- public void testEscapeForwardSlash() {
- // escape "/"
- assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar"));
- // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to retain the regex behaviour with "*"
- assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*"));
- assertEquals("", "");
- assertEquals("foo", "foo");
- }
+ @Test
+ public void testEscapeForwardSlash() {
+ // escape "/"
+ assertEquals("\\\\/foo\\\\/bar", SearchUtil.escapeForwardSlash("/foo/bar"));
+ // "/" is escaped but "*" is not escaped and is treated as regex. Since currently we want to
+ // retain the regex behaviour with "*"
+ assertEquals("\\\\/foo\\\\/bar\\\\/*", SearchUtil.escapeForwardSlash("/foo/bar/*"));
+ assertEquals("", "");
+ assertEquals("foo", "foo");
+ }
}
diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle
index fba0031351b588..6e8cb939669226 100644
--- a/datahub-graphql-core/build.gradle
+++ b/datahub-graphql-core/build.gradle
@@ -1,7 +1,8 @@
plugins {
+ id 'java'
id "io.github.kobylynskyi.graphql.codegen" version "4.1.1"
}
-apply plugin: 'java'
+
dependencies {
implementation project(':metadata-service:restli-client')
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java
index 4488f27c19d808..e45bed33eb0236 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/Constants.java
@@ -1,29 +1,27 @@
package com.linkedin.datahub.graphql;
-/**
- * Constants relating to GraphQL type system & execution.
- */
+/** Constants relating to GraphQL type system & execution. */
public class Constants {
- private Constants() { };
+ private Constants() {}
+ ;
- public static final String URN_FIELD_NAME = "urn";
- public static final String URNS_FIELD_NAME = "urns";
- public static final String GMS_SCHEMA_FILE = "entity.graphql";
- public static final String SEARCH_SCHEMA_FILE = "search.graphql";
- public static final String APP_SCHEMA_FILE = "app.graphql";
- public static final String AUTH_SCHEMA_FILE = "auth.graphql";
- public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql";
- public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql";
- public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql";
- public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql";
- public static final String TESTS_SCHEMA_FILE = "tests.graphql";
- public static final String STEPS_SCHEMA_FILE = "step.graphql";
- public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql";
- public static final String BROWSE_PATH_DELIMITER = "/";
- public static final String BROWSE_PATH_V2_DELIMITER = "␟";
- public static final String VERSION_STAMP_FIELD_NAME = "versionStamp";
-
- public static final String ENTITY_FILTER_NAME = "_entityType";
+ public static final String URN_FIELD_NAME = "urn";
+ public static final String URNS_FIELD_NAME = "urns";
+ public static final String GMS_SCHEMA_FILE = "entity.graphql";
+ public static final String SEARCH_SCHEMA_FILE = "search.graphql";
+ public static final String APP_SCHEMA_FILE = "app.graphql";
+ public static final String AUTH_SCHEMA_FILE = "auth.graphql";
+ public static final String ANALYTICS_SCHEMA_FILE = "analytics.graphql";
+ public static final String RECOMMENDATIONS_SCHEMA_FILE = "recommendation.graphql";
+ public static final String INGESTION_SCHEMA_FILE = "ingestion.graphql";
+ public static final String TIMELINE_SCHEMA_FILE = "timeline.graphql";
+ public static final String TESTS_SCHEMA_FILE = "tests.graphql";
+ public static final String STEPS_SCHEMA_FILE = "step.graphql";
+ public static final String LINEAGE_SCHEMA_FILE = "lineage.graphql";
+ public static final String BROWSE_PATH_DELIMITER = "/";
+ public static final String BROWSE_PATH_V2_DELIMITER = "␟";
+ public static final String VERSION_STAMP_FIELD_NAME = "versionStamp";
+ public static final String ENTITY_FILTER_NAME = "_entityType";
}
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
index 9ea8126a07ab2b..f0cb56b1a99ce4 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java
@@ -1,5 +1,9 @@
package com.linkedin.datahub.graphql;
+import static com.linkedin.datahub.graphql.Constants.*;
+import static com.linkedin.metadata.Constants.*;
+import static graphql.scalars.ExtendedScalars.*;
+
import com.datahub.authentication.AuthenticationConfiguration;
import com.datahub.authentication.group.GroupService;
import com.datahub.authentication.invite.InviteTokenService;
@@ -68,7 +72,6 @@
import com.linkedin.datahub.graphql.generated.ListQueriesResult;
import com.linkedin.datahub.graphql.generated.ListTestsResult;
import com.linkedin.datahub.graphql.generated.ListViewsResult;
-import com.linkedin.datahub.graphql.generated.MatchedField;
import com.linkedin.datahub.graphql.generated.MLFeature;
import com.linkedin.datahub.graphql.generated.MLFeatureProperties;
import com.linkedin.datahub.graphql.generated.MLFeatureTable;
@@ -78,6 +81,7 @@
import com.linkedin.datahub.graphql.generated.MLModelProperties;
import com.linkedin.datahub.graphql.generated.MLPrimaryKey;
import com.linkedin.datahub.graphql.generated.MLPrimaryKeyProperties;
+import com.linkedin.datahub.graphql.generated.MatchedField;
import com.linkedin.datahub.graphql.generated.Notebook;
import com.linkedin.datahub.graphql.generated.Owner;
import com.linkedin.datahub.graphql.generated.OwnershipTypeEntity;
@@ -284,7 +288,6 @@
import com.linkedin.datahub.graphql.types.dataset.VersionedDatasetType;
import com.linkedin.datahub.graphql.types.dataset.mappers.DatasetProfileMapper;
import com.linkedin.datahub.graphql.types.domain.DomainType;
-import com.linkedin.datahub.graphql.types.rolemetadata.RoleType;
import com.linkedin.datahub.graphql.types.glossary.GlossaryNodeType;
import com.linkedin.datahub.graphql.types.glossary.GlossaryTermType;
import com.linkedin.datahub.graphql.types.mlmodel.MLFeatureTableType;
@@ -297,6 +300,7 @@
import com.linkedin.datahub.graphql.types.policy.DataHubPolicyType;
import com.linkedin.datahub.graphql.types.query.QueryType;
import com.linkedin.datahub.graphql.types.role.DataHubRoleType;
+import com.linkedin.datahub.graphql.types.rolemetadata.RoleType;
import com.linkedin.datahub.graphql.types.schemafield.SchemaFieldType;
import com.linkedin.datahub.graphql.types.tag.TagType;
import com.linkedin.datahub.graphql.types.test.TestType;
@@ -352,205 +356,191 @@
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderOptions;
-import static com.linkedin.datahub.graphql.Constants.*;
-import static com.linkedin.metadata.Constants.*;
-import static graphql.scalars.ExtendedScalars.*;
-
-
/**
- * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the the GMS graph.
+ * A {@link GraphQLEngine} configured to provide access to the entities and aspects on the GMS
+ * graph.
*/
@Slf4j
@Getter
public class GmsGraphQLEngine {
- private final EntityClient entityClient;
- private final SystemEntityClient systemEntityClient;
- private final GraphClient graphClient;
- private final UsageClient usageClient;
- private final SiblingGraphService siblingGraphService;
-
- private final EntityService entityService;
- private final AnalyticsService analyticsService;
- private final RecommendationsService recommendationsService;
- private final EntityRegistry entityRegistry;
- private final StatefulTokenService statefulTokenService;
- private final SecretService secretService;
- private final GitVersion gitVersion;
- private final boolean supportsImpactAnalysis;
- private final TimeseriesAspectService timeseriesAspectService;
- private final TimelineService timelineService;
- private final NativeUserService nativeUserService;
- private final GroupService groupService;
- private final RoleService roleService;
- private final InviteTokenService inviteTokenService;
- private final PostService postService;
- private final SettingsService settingsService;
- private final ViewService viewService;
- private final OwnershipTypeService ownershipTypeService;
- private final LineageService lineageService;
- private final QueryService queryService;
- private final DataProductService dataProductService;
-
- private final FeatureFlags featureFlags;
-
- private final IngestionConfiguration ingestionConfiguration;
- private final AuthenticationConfiguration authenticationConfiguration;
- private final AuthorizationConfiguration authorizationConfiguration;
- private final VisualConfiguration visualConfiguration;
- private final TelemetryConfiguration telemetryConfiguration;
- private final TestsConfiguration testsConfiguration;
- private final DataHubConfiguration datahubConfiguration;
- private final ViewsConfiguration viewsConfiguration;
-
- private final DatasetType datasetType;
-
- private final RoleType roleType;
-
- private final CorpUserType corpUserType;
- private final CorpGroupType corpGroupType;
- private final ChartType chartType;
- private final DashboardType dashboardType;
- private final DataPlatformType dataPlatformType;
- private final TagType tagType;
- private final MLModelType mlModelType;
- private final MLModelGroupType mlModelGroupType;
- private final MLFeatureType mlFeatureType;
- private final MLFeatureTableType mlFeatureTableType;
- private final MLPrimaryKeyType mlPrimaryKeyType;
- private final DataFlowType dataFlowType;
- private final DataJobType dataJobType;
- private final GlossaryTermType glossaryTermType;
- private final GlossaryNodeType glossaryNodeType;
- private final AspectType aspectType;
- private final ContainerType containerType;
- private final DomainType domainType;
- private final NotebookType notebookType;
- private final AssertionType assertionType;
- private final VersionedDatasetType versionedDatasetType;
- private final DataPlatformInstanceType dataPlatformInstanceType;
- private final AccessTokenMetadataType accessTokenMetadataType;
- private final TestType testType;
- private final DataHubPolicyType dataHubPolicyType;
- private final DataHubRoleType dataHubRoleType;
- private final SchemaFieldType schemaFieldType;
- private final DataHubViewType dataHubViewType;
- private final QueryType queryType;
- private final DataProductType dataProductType;
- private final OwnershipType ownershipType;
-
- /**
- * A list of GraphQL Plugins that extend the core engine
- */
-    private final List<GmsGraphQLPlugin> graphQLPlugins;
-
- /**
- * Configures the graph objects that can be fetched primary key.
- */
-    public final List<EntityType<?, ?>> entityTypes;
-
- /**
- * Configures all graph objects
- */
-    public final List<LoadableType<?, ?>> loadableTypes;
-
- /**
- * Configures the graph objects for owner
- */
-    public final List<LoadableType<?, ?>> ownerTypes;
-
- /**
- * Configures the graph objects that can be searched.
- */
-    public final List<SearchableEntityType<?, ?>> searchableTypes;
-
- /**
- * Configures the graph objects that can be browsed.
- */
-    public final List<BrowsableEntityType<?, ?>> browsableTypes;
-
- public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
-
- this.graphQLPlugins = List.of(
+ private final EntityClient entityClient;
+ private final SystemEntityClient systemEntityClient;
+ private final GraphClient graphClient;
+ private final UsageClient usageClient;
+ private final SiblingGraphService siblingGraphService;
+
+ private final EntityService entityService;
+ private final AnalyticsService analyticsService;
+ private final RecommendationsService recommendationsService;
+ private final EntityRegistry entityRegistry;
+ private final StatefulTokenService statefulTokenService;
+ private final SecretService secretService;
+ private final GitVersion gitVersion;
+ private final boolean supportsImpactAnalysis;
+ private final TimeseriesAspectService timeseriesAspectService;
+ private final TimelineService timelineService;
+ private final NativeUserService nativeUserService;
+ private final GroupService groupService;
+ private final RoleService roleService;
+ private final InviteTokenService inviteTokenService;
+ private final PostService postService;
+ private final SettingsService settingsService;
+ private final ViewService viewService;
+ private final OwnershipTypeService ownershipTypeService;
+ private final LineageService lineageService;
+ private final QueryService queryService;
+ private final DataProductService dataProductService;
+
+ private final FeatureFlags featureFlags;
+
+ private final IngestionConfiguration ingestionConfiguration;
+ private final AuthenticationConfiguration authenticationConfiguration;
+ private final AuthorizationConfiguration authorizationConfiguration;
+ private final VisualConfiguration visualConfiguration;
+ private final TelemetryConfiguration telemetryConfiguration;
+ private final TestsConfiguration testsConfiguration;
+ private final DataHubConfiguration datahubConfiguration;
+ private final ViewsConfiguration viewsConfiguration;
+
+ private final DatasetType datasetType;
+
+ private final RoleType roleType;
+
+ private final CorpUserType corpUserType;
+ private final CorpGroupType corpGroupType;
+ private final ChartType chartType;
+ private final DashboardType dashboardType;
+ private final DataPlatformType dataPlatformType;
+ private final TagType tagType;
+ private final MLModelType mlModelType;
+ private final MLModelGroupType mlModelGroupType;
+ private final MLFeatureType mlFeatureType;
+ private final MLFeatureTableType mlFeatureTableType;
+ private final MLPrimaryKeyType mlPrimaryKeyType;
+ private final DataFlowType dataFlowType;
+ private final DataJobType dataJobType;
+ private final GlossaryTermType glossaryTermType;
+ private final GlossaryNodeType glossaryNodeType;
+ private final AspectType aspectType;
+ private final ContainerType containerType;
+ private final DomainType domainType;
+ private final NotebookType notebookType;
+ private final AssertionType assertionType;
+ private final VersionedDatasetType versionedDatasetType;
+ private final DataPlatformInstanceType dataPlatformInstanceType;
+ private final AccessTokenMetadataType accessTokenMetadataType;
+ private final TestType testType;
+ private final DataHubPolicyType dataHubPolicyType;
+ private final DataHubRoleType dataHubRoleType;
+ private final SchemaFieldType schemaFieldType;
+ private final DataHubViewType dataHubViewType;
+ private final QueryType queryType;
+ private final DataProductType dataProductType;
+ private final OwnershipType ownershipType;
+
+ /** A list of GraphQL Plugins that extend the core engine */
+  private final List<GmsGraphQLPlugin> graphQLPlugins;
+
+  /** Configures the graph objects that can be fetched by primary key. */
+  public final List<EntityType<?, ?>> entityTypes;
+
+ /** Configures all graph objects */
+  public final List<LoadableType<?, ?>> loadableTypes;
+
+ /** Configures the graph objects for owner */
+  public final List<LoadableType<?, ?>> ownerTypes;
+
+ /** Configures the graph objects that can be searched. */
+  public final List<SearchableEntityType<?, ?>> searchableTypes;
+
+ /** Configures the graph objects that can be browsed. */
+  public final List<BrowsableEntityType<?, ?>> browsableTypes;
+
+ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
+
+ this.graphQLPlugins =
+ List.of(
// Add new plugins here
- );
-
- this.graphQLPlugins.forEach(plugin -> plugin.init(args));
-
- this.entityClient = args.entityClient;
- this.systemEntityClient = args.systemEntityClient;
- this.graphClient = args.graphClient;
- this.usageClient = args.usageClient;
- this.siblingGraphService = args.siblingGraphService;
-
- this.analyticsService = args.analyticsService;
- this.entityService = args.entityService;
- this.recommendationsService = args.recommendationsService;
- this.statefulTokenService = args.statefulTokenService;
- this.secretService = args.secretService;
- this.entityRegistry = args.entityRegistry;
- this.gitVersion = args.gitVersion;
- this.supportsImpactAnalysis = args.supportsImpactAnalysis;
- this.timeseriesAspectService = args.timeseriesAspectService;
- this.timelineService = args.timelineService;
- this.nativeUserService = args.nativeUserService;
- this.groupService = args.groupService;
- this.roleService = args.roleService;
- this.inviteTokenService = args.inviteTokenService;
- this.postService = args.postService;
- this.viewService = args.viewService;
- this.ownershipTypeService = args.ownershipTypeService;
- this.settingsService = args.settingsService;
- this.lineageService = args.lineageService;
- this.queryService = args.queryService;
- this.dataProductService = args.dataProductService;
-
- this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
- this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration);
- this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration);
- this.visualConfiguration = args.visualConfiguration;
- this.telemetryConfiguration = args.telemetryConfiguration;
- this.testsConfiguration = args.testsConfiguration;
- this.datahubConfiguration = args.datahubConfiguration;
- this.viewsConfiguration = args.viewsConfiguration;
- this.featureFlags = args.featureFlags;
-
- this.datasetType = new DatasetType(entityClient);
- this.roleType = new RoleType(entityClient);
- this.corpUserType = new CorpUserType(entityClient, featureFlags);
- this.corpGroupType = new CorpGroupType(entityClient);
- this.chartType = new ChartType(entityClient);
- this.dashboardType = new DashboardType(entityClient);
- this.dataPlatformType = new DataPlatformType(entityClient);
- this.tagType = new TagType(entityClient);
- this.mlModelType = new MLModelType(entityClient);
- this.mlModelGroupType = new MLModelGroupType(entityClient);
- this.mlFeatureType = new MLFeatureType(entityClient);
- this.mlFeatureTableType = new MLFeatureTableType(entityClient);
- this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient);
- this.dataFlowType = new DataFlowType(entityClient);
- this.dataJobType = new DataJobType(entityClient);
- this.glossaryTermType = new GlossaryTermType(entityClient);
- this.glossaryNodeType = new GlossaryNodeType(entityClient);
- this.aspectType = new AspectType(entityClient);
- this.containerType = new ContainerType(entityClient);
- this.domainType = new DomainType(entityClient);
- this.notebookType = new NotebookType(entityClient);
- this.assertionType = new AssertionType(entityClient);
- this.versionedDatasetType = new VersionedDatasetType(entityClient);
- this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient);
- this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient);
- this.testType = new TestType(entityClient);
- this.dataHubPolicyType = new DataHubPolicyType(entityClient);
- this.dataHubRoleType = new DataHubRoleType(entityClient);
- this.schemaFieldType = new SchemaFieldType();
- this.dataHubViewType = new DataHubViewType(entityClient);
- this.queryType = new QueryType(entityClient);
- this.dataProductType = new DataProductType(entityClient);
- this.ownershipType = new OwnershipType(entityClient);
-
- // Init Lists
- this.entityTypes = ImmutableList.of(
+ );
+
+ this.graphQLPlugins.forEach(plugin -> plugin.init(args));
+
+ this.entityClient = args.entityClient;
+ this.systemEntityClient = args.systemEntityClient;
+ this.graphClient = args.graphClient;
+ this.usageClient = args.usageClient;
+ this.siblingGraphService = args.siblingGraphService;
+
+ this.analyticsService = args.analyticsService;
+ this.entityService = args.entityService;
+ this.recommendationsService = args.recommendationsService;
+ this.statefulTokenService = args.statefulTokenService;
+ this.secretService = args.secretService;
+ this.entityRegistry = args.entityRegistry;
+ this.gitVersion = args.gitVersion;
+ this.supportsImpactAnalysis = args.supportsImpactAnalysis;
+ this.timeseriesAspectService = args.timeseriesAspectService;
+ this.timelineService = args.timelineService;
+ this.nativeUserService = args.nativeUserService;
+ this.groupService = args.groupService;
+ this.roleService = args.roleService;
+ this.inviteTokenService = args.inviteTokenService;
+ this.postService = args.postService;
+ this.viewService = args.viewService;
+ this.ownershipTypeService = args.ownershipTypeService;
+ this.settingsService = args.settingsService;
+ this.lineageService = args.lineageService;
+ this.queryService = args.queryService;
+ this.dataProductService = args.dataProductService;
+
+ this.ingestionConfiguration = Objects.requireNonNull(args.ingestionConfiguration);
+ this.authenticationConfiguration = Objects.requireNonNull(args.authenticationConfiguration);
+ this.authorizationConfiguration = Objects.requireNonNull(args.authorizationConfiguration);
+ this.visualConfiguration = args.visualConfiguration;
+ this.telemetryConfiguration = args.telemetryConfiguration;
+ this.testsConfiguration = args.testsConfiguration;
+ this.datahubConfiguration = args.datahubConfiguration;
+ this.viewsConfiguration = args.viewsConfiguration;
+ this.featureFlags = args.featureFlags;
+
+ this.datasetType = new DatasetType(entityClient);
+ this.roleType = new RoleType(entityClient);
+ this.corpUserType = new CorpUserType(entityClient, featureFlags);
+ this.corpGroupType = new CorpGroupType(entityClient);
+ this.chartType = new ChartType(entityClient);
+ this.dashboardType = new DashboardType(entityClient);
+ this.dataPlatformType = new DataPlatformType(entityClient);
+ this.tagType = new TagType(entityClient);
+ this.mlModelType = new MLModelType(entityClient);
+ this.mlModelGroupType = new MLModelGroupType(entityClient);
+ this.mlFeatureType = new MLFeatureType(entityClient);
+ this.mlFeatureTableType = new MLFeatureTableType(entityClient);
+ this.mlPrimaryKeyType = new MLPrimaryKeyType(entityClient);
+ this.dataFlowType = new DataFlowType(entityClient);
+ this.dataJobType = new DataJobType(entityClient);
+ this.glossaryTermType = new GlossaryTermType(entityClient);
+ this.glossaryNodeType = new GlossaryNodeType(entityClient);
+ this.aspectType = new AspectType(entityClient);
+ this.containerType = new ContainerType(entityClient);
+ this.domainType = new DomainType(entityClient);
+ this.notebookType = new NotebookType(entityClient);
+ this.assertionType = new AssertionType(entityClient);
+ this.versionedDatasetType = new VersionedDatasetType(entityClient);
+ this.dataPlatformInstanceType = new DataPlatformInstanceType(entityClient);
+ this.accessTokenMetadataType = new AccessTokenMetadataType(entityClient);
+ this.testType = new TestType(entityClient);
+ this.dataHubPolicyType = new DataHubPolicyType(entityClient);
+ this.dataHubRoleType = new DataHubRoleType(entityClient);
+ this.schemaFieldType = new SchemaFieldType();
+ this.dataHubViewType = new DataHubViewType(entityClient);
+ this.queryType = new QueryType(entityClient);
+ this.dataProductType = new DataProductType(entityClient);
+ this.ownershipType = new OwnershipType(entityClient);
+
+ // Init Lists
+ this.entityTypes =
+ ImmutableList.of(
datasetType,
roleType,
corpUserType,
@@ -582,1262 +572,1867 @@ public GmsGraphQLEngine(final GmsGraphQLEngineArgs args) {
dataHubViewType,
queryType,
dataProductType,
- ownershipType
- );
- this.loadableTypes = new ArrayList<>(entityTypes);
- // Extend loadable types with types from the plugins
- // This allows us to offer search and browse capabilities out of the box for those types
- for (GmsGraphQLPlugin plugin: this.graphQLPlugins) {
-      Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes();
- if (pluginLoadableTypes != null) {
- this.loadableTypes.addAll(pluginLoadableTypes);
- }
- }
- this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType);
- this.searchableTypes = loadableTypes.stream()
+ ownershipType);
+ this.loadableTypes = new ArrayList<>(entityTypes);
+ // Extend loadable types with types from the plugins
+ // This allows us to offer search and browse capabilities out of the box for those types
+ for (GmsGraphQLPlugin plugin : this.graphQLPlugins) {
+      Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes();
+ if (pluginLoadableTypes != null) {
+ this.loadableTypes.addAll(pluginLoadableTypes);
+ }
+ }
+ this.ownerTypes = ImmutableList.of(corpUserType, corpGroupType);
+ this.searchableTypes =
+ loadableTypes.stream()
            .filter(type -> (type instanceof SearchableEntityType<?, ?>))
            .map(type -> (SearchableEntityType<?, ?>) type)
.collect(Collectors.toList());
- this.browsableTypes = loadableTypes.stream()
+ this.browsableTypes =
+ loadableTypes.stream()
            .filter(type -> (type instanceof BrowsableEntityType<?, ?>))
            .map(type -> (BrowsableEntityType<?, ?>) type)
.collect(Collectors.toList());
- }
+ }
- /**
- * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from
- * a {@link LoadableType}.
- */
- public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(final Collection<? extends LoadableType<?, ?>> loadableTypes) {
- return loadableTypes
- .stream()
- .collect(Collectors.toMap(
+ /**
+ * Returns a {@link Supplier} responsible for creating a new {@link DataLoader} from a {@link
+ * LoadableType}.
+ */
+ public Map<String, Function<QueryContext, DataLoader<?, ?>>> loaderSuppliers(
+ final Collection<? extends LoadableType<?, ?>> loadableTypes) {
+ return loadableTypes.stream()
+ .collect(
+ Collectors.toMap(
LoadableType::name,
- (graphType) -> (context) -> createDataLoader(graphType, context)
- ));
- }
+ (graphType) -> (context) -> createDataLoader(graphType, context)));
+ }
- /**
- * Final call to wire up any extra resolvers the plugin might want to add on
- * @param builder
- */
- private void configurePluginResolvers(final RuntimeWiring.Builder builder) {
- this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this));
- }
-
-
- public void configureRuntimeWiring(final RuntimeWiring.Builder builder) {
- configureQueryResolvers(builder);
- configureMutationResolvers(builder);
- configureGenericEntityResolvers(builder);
- configureDatasetResolvers(builder);
- configureCorpUserResolvers(builder);
- configureCorpGroupResolvers(builder);
- configureDashboardResolvers(builder);
- configureNotebookResolvers(builder);
- configureChartResolvers(builder);
- configureTypeResolvers(builder);
- configureTypeExtensions(builder);
- configureTagAssociationResolver(builder);
- configureGlossaryTermAssociationResolver(builder);
- configureDataJobResolvers(builder);
- configureDataFlowResolvers(builder);
- configureMLFeatureTableResolvers(builder);
- configureGlossaryRelationshipResolvers(builder);
- configureIngestionSourceResolvers(builder);
- configureAnalyticsResolvers(builder);
- configureContainerResolvers(builder);
- configureDataPlatformInstanceResolvers(builder);
- configureGlossaryTermResolvers(builder);
- configureOrganisationRoleResolvers(builder);
- configureGlossaryNodeResolvers(builder);
- configureDomainResolvers(builder);
- configureDataProductResolvers(builder);
- configureAssertionResolvers(builder);
- configurePolicyResolvers(builder);
- configureDataProcessInstanceResolvers(builder);
- configureVersionedDatasetResolvers(builder);
- configureAccessAccessTokenMetadataResolvers(builder);
- configureTestResultResolvers(builder);
- configureRoleResolvers(builder);
- configureSchemaFieldResolvers(builder);
- configureEntityPathResolvers(builder);
- configureViewResolvers(builder);
- configureQueryEntityResolvers(builder);
- configureOwnershipTypeResolver(builder);
- configurePluginResolvers(builder);
- }
-
- private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) {
- builder.type("Role", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- );
- builder.type("RoleAssociation", typeWiring -> typeWiring
- .dataFetcher("role",
- new LoadableTypeResolver<>(roleType,
- (env) -> ((com.linkedin.datahub.graphql.generated.RoleAssociation)
- env.getSource()).getRole().getUrn()))
- );
- builder.type("RoleUser", typeWiring -> typeWiring
- .dataFetcher("user",
- new LoadableTypeResolver<>(corpUserType,
- (env) -> ((com.linkedin.datahub.graphql.generated.RoleUser)
- env.getSource()).getUser().getUrn()))
- );
+ /**
+ * Final call to wire up any extra resolvers the plugin might want to add on
+ *
+ * @param builder
+ */
+ private void configurePluginResolvers(final RuntimeWiring.Builder builder) {
+ this.graphQLPlugins.forEach(plugin -> plugin.configureExtraResolvers(builder, this));
+ }
+
+ public void configureRuntimeWiring(final RuntimeWiring.Builder builder) {
+ configureQueryResolvers(builder);
+ configureMutationResolvers(builder);
+ configureGenericEntityResolvers(builder);
+ configureDatasetResolvers(builder);
+ configureCorpUserResolvers(builder);
+ configureCorpGroupResolvers(builder);
+ configureDashboardResolvers(builder);
+ configureNotebookResolvers(builder);
+ configureChartResolvers(builder);
+ configureTypeResolvers(builder);
+ configureTypeExtensions(builder);
+ configureTagAssociationResolver(builder);
+ configureGlossaryTermAssociationResolver(builder);
+ configureDataJobResolvers(builder);
+ configureDataFlowResolvers(builder);
+ configureMLFeatureTableResolvers(builder);
+ configureGlossaryRelationshipResolvers(builder);
+ configureIngestionSourceResolvers(builder);
+ configureAnalyticsResolvers(builder);
+ configureContainerResolvers(builder);
+ configureDataPlatformInstanceResolvers(builder);
+ configureGlossaryTermResolvers(builder);
+ configureOrganisationRoleResolvers(builder);
+ configureGlossaryNodeResolvers(builder);
+ configureDomainResolvers(builder);
+ configureDataProductResolvers(builder);
+ configureAssertionResolvers(builder);
+ configurePolicyResolvers(builder);
+ configureDataProcessInstanceResolvers(builder);
+ configureVersionedDatasetResolvers(builder);
+ configureAccessAccessTokenMetadataResolvers(builder);
+ configureTestResultResolvers(builder);
+ configureRoleResolvers(builder);
+ configureSchemaFieldResolvers(builder);
+ configureEntityPathResolvers(builder);
+ configureViewResolvers(builder);
+ configureQueryEntityResolvers(builder);
+ configureOwnershipTypeResolver(builder);
+ configurePluginResolvers(builder);
+ }
+
+ private void configureOrganisationRoleResolvers(RuntimeWiring.Builder builder) {
+ builder.type(
+ "Role",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)));
+ builder.type(
+ "RoleAssociation",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "role",
+ new LoadableTypeResolver<>(
+ roleType,
+ (env) ->
+ ((com.linkedin.datahub.graphql.generated.RoleAssociation) env.getSource())
+ .getRole()
+ .getUrn())));
+ builder.type(
+ "RoleUser",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "user",
+ new LoadableTypeResolver<>(
+ corpUserType,
+ (env) ->
+ ((com.linkedin.datahub.graphql.generated.RoleUser) env.getSource())
+ .getUser()
+ .getUrn())));
+ }
+
+ public GraphQLEngine.Builder builder() {
+ final GraphQLEngine.Builder builder = GraphQLEngine.builder();
+ builder
+ .addSchema(fileBasedSchema(GMS_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(APP_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE))
+ .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE));
+
+ for (GmsGraphQLPlugin plugin : this.graphQLPlugins) {
+ List<String> pluginSchemaFiles = plugin.getSchemaFiles();
+ if (pluginSchemaFiles != null) {
+ pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema)));
+ }
+ Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes();
+ if (pluginLoadableTypes != null) {
+ pluginLoadableTypes.forEach(
+ loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes)));
+ }
}
-
- public GraphQLEngine.Builder builder() {
- final GraphQLEngine.Builder builder = GraphQLEngine.builder();
- builder
- .addSchema(fileBasedSchema(GMS_SCHEMA_FILE))
- .addSchema(fileBasedSchema(SEARCH_SCHEMA_FILE))
- .addSchema(fileBasedSchema(APP_SCHEMA_FILE))
- .addSchema(fileBasedSchema(AUTH_SCHEMA_FILE))
- .addSchema(fileBasedSchema(ANALYTICS_SCHEMA_FILE))
- .addSchema(fileBasedSchema(RECOMMENDATIONS_SCHEMA_FILE))
- .addSchema(fileBasedSchema(INGESTION_SCHEMA_FILE))
- .addSchema(fileBasedSchema(TIMELINE_SCHEMA_FILE))
- .addSchema(fileBasedSchema(TESTS_SCHEMA_FILE))
- .addSchema(fileBasedSchema(STEPS_SCHEMA_FILE))
- .addSchema(fileBasedSchema(LINEAGE_SCHEMA_FILE));
-
- for (GmsGraphQLPlugin plugin: this.graphQLPlugins) {
- List<String> pluginSchemaFiles = plugin.getSchemaFiles();
- if (pluginSchemaFiles != null) {
- pluginSchemaFiles.forEach(schema -> builder.addSchema(fileBasedSchema(schema)));
- }
- Collection<? extends LoadableType<?, ?>> pluginLoadableTypes = plugin.getLoadableTypes();
- if (pluginLoadableTypes != null) {
- pluginLoadableTypes.forEach(loadableType -> builder.addDataLoaders(loaderSuppliers(pluginLoadableTypes)));
- }
- }
- builder
- .addDataLoaders(loaderSuppliers(loadableTypes))
- .addDataLoader("Aspect", context -> createDataLoader(aspectType, context))
- .configureRuntimeWiring(this::configureRuntimeWiring);
- return builder;
+ builder
+ .addDataLoaders(loaderSuppliers(loadableTypes))
+ .addDataLoader("Aspect", context -> createDataLoader(aspectType, context))
+ .configureRuntimeWiring(this::configureRuntimeWiring);
+ return builder;
+ }
+
+ public static String fileBasedSchema(String fileName) {
+ String schema;
+ try {
+ InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName);
+ schema = IOUtils.toString(is, StandardCharsets.UTF_8);
+ is.close();
+ } catch (IOException e) {
+ throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e);
}
-
- public static String fileBasedSchema(String fileName) {
- String schema;
- try {
- InputStream is = Thread.currentThread().getContextClassLoader().getResourceAsStream(fileName);
- schema = IOUtils.toString(is, StandardCharsets.UTF_8);
- is.close();
- } catch (IOException e) {
- throw new RuntimeException("Failed to find GraphQL Schema with name " + fileName, e);
- }
- return schema;
+ return schema;
+ }
+
+ private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) {
+ final boolean isAnalyticsEnabled = analyticsService != null;
+ builder
+ .type(
+ "Query",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled)))
+ .type(
+ "AnalyticsChart",
+ typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver()));
+ if (isAnalyticsEnabled) {
+ builder.type(
+ "Query",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "getAnalyticsCharts", new GetChartsResolver(analyticsService, entityClient))
+ .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService))
+ .dataFetcher(
+ "getMetadataAnalyticsCharts",
+ new GetMetadataAnalyticsResolver(entityClient)));
}
+ }
- private void configureAnalyticsResolvers(final RuntimeWiring.Builder builder) {
- final boolean isAnalyticsEnabled = analyticsService != null;
- builder.type("Query", typeWiring -> typeWiring.dataFetcher("isAnalyticsEnabled", new IsAnalyticsEnabledResolver(isAnalyticsEnabled)))
- .type("AnalyticsChart", typeWiring -> typeWiring.typeResolver(new AnalyticsChartTypeResolver()));
- if (isAnalyticsEnabled) {
- builder.type("Query", typeWiring -> typeWiring.dataFetcher("getAnalyticsCharts",
- new GetChartsResolver(analyticsService, entityClient))
- .dataFetcher("getHighlights", new GetHighlightsResolver(analyticsService))
- .dataFetcher("getMetadataAnalyticsCharts", new GetMetadataAnalyticsResolver(entityClient)));
- }
- }
-
- private void configureContainerResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("Container", typeWiring -> typeWiring
+ private void configureContainerResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Container",
+ typeWiring ->
+ typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
.dataFetcher("entities", new ContainerEntitiesResolver(entityClient))
.dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("platform",
- new LoadableTypeResolver<>(dataPlatformType,
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
(env) -> ((Container) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("container",
- new LoadableTypeResolver<>(containerType,
+ .dataFetcher(
+ "container",
+ new LoadableTypeResolver<>(
+ containerType,
(env) -> {
- final Container container = env.getSource();
- return container.getContainer() != null ? container.getContainer().getUrn() : null;
- })
- )
+ final Container container = env.getSource();
+ return container.getContainer() != null
+ ? container.getContainer().getUrn()
+ : null;
+ }))
.dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
(env) -> {
- final Container container = env.getSource();
- return container.getDataPlatformInstance() != null ? container.getDataPlatformInstance().getUrn() : null;
- })
- )
- );
- }
-
- private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("DataPlatformInstance", typeWiring -> typeWiring
- .dataFetcher("platform",
- new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn()))
- );
- }
-
- private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Query", typeWiring -> typeWiring
- .dataFetcher("appConfig",
- new AppConfigResolver(gitVersion, analyticsService != null,
- this.ingestionConfiguration,
- this.authenticationConfiguration,
- this.authorizationConfiguration,
- this.supportsImpactAnalysis,
- this.visualConfiguration,
- this.telemetryConfiguration,
- this.testsConfiguration,
- this.datahubConfiguration,
- this.viewsConfiguration,
- this.featureFlags
- ))
- .dataFetcher("me", new MeResolver(this.entityClient, featureFlags))
- .dataFetcher("search", new SearchResolver(this.entityClient))
- .dataFetcher("searchAcrossEntities", new SearchAcrossEntitiesResolver(this.entityClient, this.viewService))
- .dataFetcher("scrollAcrossEntities", new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService))
- .dataFetcher("searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient))
- .dataFetcher("scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient))
- .dataFetcher("aggregateAcrossEntities", new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService))
- .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes))
- .dataFetcher("autoCompleteForMultiple", new AutoCompleteForMultipleResolver(searchableTypes, this.viewService))
- .dataFetcher("browse", new BrowseResolver(browsableTypes))
- .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes))
- .dataFetcher("dataset", getResolver(datasetType))
- .dataFetcher("role", getResolver(roleType))
- .dataFetcher("versionedDataset", getResolver(versionedDatasetType,
- (env) -> new VersionedUrn().setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME)))
- .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME))))
- .dataFetcher("notebook", getResolver(notebookType))
- .dataFetcher("corpUser", getResolver(corpUserType))
- .dataFetcher("corpGroup", getResolver(corpGroupType))
- .dataFetcher("dashboard", getResolver(dashboardType))
- .dataFetcher("chart", getResolver(chartType))
- .dataFetcher("tag", getResolver(tagType))
- .dataFetcher("dataFlow", getResolver(dataFlowType))
- .dataFetcher("dataJob", getResolver(dataJobType))
- .dataFetcher("glossaryTerm", getResolver(glossaryTermType))
- .dataFetcher("glossaryNode", getResolver(glossaryNodeType))
- .dataFetcher("domain", getResolver((domainType)))
- .dataFetcher("dataPlatform", getResolver(dataPlatformType))
- .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType))
- .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType))
- .dataFetcher("mlFeature", getResolver(mlFeatureType))
- .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType))
- .dataFetcher("mlModel", getResolver(mlModelType))
- .dataFetcher("mlModelGroup", getResolver(mlModelGroupType))
- .dataFetcher("assertion", getResolver(assertionType))
- .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient))
- .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver())
- .dataFetcher("listUsers", new ListUsersResolver(this.entityClient))
- .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient))
- .dataFetcher("listRecommendations", new ListRecommendationsResolver(recommendationsService))
- .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient))
- .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService))
- .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient))
- .dataFetcher("container", getResolver(containerType))
- .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient))
- .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient))
- .dataFetcher("getSecretValues", new GetSecretValuesResolver(this.entityClient, this.secretService))
- .dataFetcher("listIngestionSources", new ListIngestionSourcesResolver(this.entityClient))
- .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient))
- .dataFetcher("executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient))
- .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService))
- .dataFetcher("getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService))
- .dataFetcher("test", getResolver(testType))
- .dataFetcher("listTests", new ListTestsResolver(entityClient))
- .dataFetcher("getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient))
- .dataFetcher("getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient))
- .dataFetcher("entityExists", new EntityExistsResolver(this.entityService))
- .dataFetcher("entity", getEntityResolver())
- .dataFetcher("entities", getEntitiesResolver())
- .dataFetcher("listRoles", new ListRolesResolver(this.entityClient))
- .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService))
- .dataFetcher("listPosts", new ListPostsResolver(this.entityClient))
- .dataFetcher("batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient))
- .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient))
- .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient))
- .dataFetcher("globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService))
- .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient))
- .dataFetcher("getQuickFilters", new GetQuickFiltersResolver(this.entityClient, this.viewService))
- .dataFetcher("dataProduct", getResolver(dataProductType))
- .dataFetcher("listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient))
- .dataFetcher("listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient))
- .dataFetcher("browseV2", new BrowseV2Resolver(this.entityClient, this.viewService))
- );
- }
-
- private DataFetcher getEntitiesResolver() {
- return new BatchGetEntitiesResolver(entityTypes,
- (env) -> {
- List<String> urns = env.getArgument(URNS_FIELD_NAME);
- return urns.stream().map((urn) -> {
+ final Container container = env.getSource();
+ return container.getDataPlatformInstance() != null
+ ? container.getDataPlatformInstance().getUrn()
+ : null;
+ })));
+ }
+
+ private void configureDataPlatformInstanceResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "DataPlatformInstance",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((DataPlatformInstance) env.getSource()).getPlatform().getUrn())));
+ }
+
+ private void configureQueryResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Query",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "appConfig",
+ new AppConfigResolver(
+ gitVersion,
+ analyticsService != null,
+ this.ingestionConfiguration,
+ this.authenticationConfiguration,
+ this.authorizationConfiguration,
+ this.supportsImpactAnalysis,
+ this.visualConfiguration,
+ this.telemetryConfiguration,
+ this.testsConfiguration,
+ this.datahubConfiguration,
+ this.viewsConfiguration,
+ this.featureFlags))
+ .dataFetcher("me", new MeResolver(this.entityClient, featureFlags))
+ .dataFetcher("search", new SearchResolver(this.entityClient))
+ .dataFetcher(
+ "searchAcrossEntities",
+ new SearchAcrossEntitiesResolver(this.entityClient, this.viewService))
+ .dataFetcher(
+ "scrollAcrossEntities",
+ new ScrollAcrossEntitiesResolver(this.entityClient, this.viewService))
+ .dataFetcher(
+ "searchAcrossLineage", new SearchAcrossLineageResolver(this.entityClient))
+ .dataFetcher(
+ "scrollAcrossLineage", new ScrollAcrossLineageResolver(this.entityClient))
+ .dataFetcher(
+ "aggregateAcrossEntities",
+ new AggregateAcrossEntitiesResolver(this.entityClient, this.viewService))
+ .dataFetcher("autoComplete", new AutoCompleteResolver(searchableTypes))
+ .dataFetcher(
+ "autoCompleteForMultiple",
+ new AutoCompleteForMultipleResolver(searchableTypes, this.viewService))
+ .dataFetcher("browse", new BrowseResolver(browsableTypes))
+ .dataFetcher("browsePaths", new BrowsePathsResolver(browsableTypes))
+ .dataFetcher("dataset", getResolver(datasetType))
+ .dataFetcher("role", getResolver(roleType))
+ .dataFetcher(
+ "versionedDataset",
+ getResolver(
+ versionedDatasetType,
+ (env) ->
+ new VersionedUrn()
+ .setUrn(UrnUtils.getUrn(env.getArgument(URN_FIELD_NAME)))
+ .setVersionStamp(env.getArgument(VERSION_STAMP_FIELD_NAME))))
+ .dataFetcher("notebook", getResolver(notebookType))
+ .dataFetcher("corpUser", getResolver(corpUserType))
+ .dataFetcher("corpGroup", getResolver(corpGroupType))
+ .dataFetcher("dashboard", getResolver(dashboardType))
+ .dataFetcher("chart", getResolver(chartType))
+ .dataFetcher("tag", getResolver(tagType))
+ .dataFetcher("dataFlow", getResolver(dataFlowType))
+ .dataFetcher("dataJob", getResolver(dataJobType))
+ .dataFetcher("glossaryTerm", getResolver(glossaryTermType))
+ .dataFetcher("glossaryNode", getResolver(glossaryNodeType))
+ .dataFetcher("domain", getResolver((domainType)))
+ .dataFetcher("dataPlatform", getResolver(dataPlatformType))
+ .dataFetcher("dataPlatformInstance", getResolver(dataPlatformInstanceType))
+ .dataFetcher("mlFeatureTable", getResolver(mlFeatureTableType))
+ .dataFetcher("mlFeature", getResolver(mlFeatureType))
+ .dataFetcher("mlPrimaryKey", getResolver(mlPrimaryKeyType))
+ .dataFetcher("mlModel", getResolver(mlModelType))
+ .dataFetcher("mlModelGroup", getResolver(mlModelGroupType))
+ .dataFetcher("assertion", getResolver(assertionType))
+ .dataFetcher("listPolicies", new ListPoliciesResolver(this.entityClient))
+ .dataFetcher("getGrantedPrivileges", new GetGrantedPrivilegesResolver())
+ .dataFetcher("listUsers", new ListUsersResolver(this.entityClient))
+ .dataFetcher("listGroups", new ListGroupsResolver(this.entityClient))
+ .dataFetcher(
+ "listRecommendations", new ListRecommendationsResolver(recommendationsService))
+ .dataFetcher("getEntityCounts", new EntityCountsResolver(this.entityClient))
+ .dataFetcher("getAccessToken", new GetAccessTokenResolver(statefulTokenService))
+ .dataFetcher("listAccessTokens", new ListAccessTokensResolver(this.entityClient))
+ .dataFetcher("container", getResolver(containerType))
+ .dataFetcher("listDomains", new ListDomainsResolver(this.entityClient))
+ .dataFetcher("listSecrets", new ListSecretsResolver(this.entityClient))
+ .dataFetcher(
+ "getSecretValues",
+ new GetSecretValuesResolver(this.entityClient, this.secretService))
+ .dataFetcher(
+ "listIngestionSources", new ListIngestionSourcesResolver(this.entityClient))
+ .dataFetcher("ingestionSource", new GetIngestionSourceResolver(this.entityClient))
+ .dataFetcher(
+ "executionRequest", new GetIngestionExecutionRequestResolver(this.entityClient))
+ .dataFetcher("getSchemaBlame", new GetSchemaBlameResolver(this.timelineService))
+ .dataFetcher(
+ "getSchemaVersionList", new GetSchemaVersionListResolver(this.timelineService))
+ .dataFetcher("test", getResolver(testType))
+ .dataFetcher("listTests", new ListTestsResolver(entityClient))
+ .dataFetcher(
+ "getRootGlossaryTerms", new GetRootGlossaryTermsResolver(this.entityClient))
+ .dataFetcher(
+ "getRootGlossaryNodes", new GetRootGlossaryNodesResolver(this.entityClient))
+ .dataFetcher("entityExists", new EntityExistsResolver(this.entityService))
+ .dataFetcher("entity", getEntityResolver())
+ .dataFetcher("entities", getEntitiesResolver())
+ .dataFetcher("listRoles", new ListRolesResolver(this.entityClient))
+ .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService))
+ .dataFetcher("listPosts", new ListPostsResolver(this.entityClient))
+ .dataFetcher(
+ "batchGetStepStates", new BatchGetStepStatesResolver(this.entityClient))
+ .dataFetcher("listMyViews", new ListMyViewsResolver(this.entityClient))
+ .dataFetcher("listGlobalViews", new ListGlobalViewsResolver(this.entityClient))
+ .dataFetcher(
+ "globalViewsSettings", new GlobalViewsSettingsResolver(this.settingsService))
+ .dataFetcher("listQueries", new ListQueriesResolver(this.entityClient))
+ .dataFetcher(
+ "getQuickFilters",
+ new GetQuickFiltersResolver(this.entityClient, this.viewService))
+ .dataFetcher("dataProduct", getResolver(dataProductType))
+ .dataFetcher(
+ "listDataProductAssets", new ListDataProductAssetsResolver(this.entityClient))
+ .dataFetcher(
+ "listOwnershipTypes", new ListOwnershipTypesResolver(this.entityClient))
+ .dataFetcher(
+ "browseV2", new BrowseV2Resolver(this.entityClient, this.viewService)));
+ }
+
+ private DataFetcher getEntitiesResolver() {
+ return new BatchGetEntitiesResolver(
+ entityTypes,
+ (env) -> {
+ List<String> urns = env.getArgument(URNS_FIELD_NAME);
+ return urns.stream()
+ .map(
+ (urn) -> {
try {
- Urn entityUrn = Urn.createFromString(urn);
- return UrnToEntityMapper.map(entityUrn);
+ Urn entityUrn = Urn.createFromString(urn);
+ return UrnToEntityMapper.map(entityUrn);
} catch (Exception e) {
- throw new RuntimeException("Failed to get entity", e);
+ throw new RuntimeException("Failed to get entity", e);
}
- }).collect(Collectors.toList());
- });
- }
+ })
+ .collect(Collectors.toList());
+ });
+ }
+
+ private DataFetcher getEntityResolver() {
+ return new EntityTypeResolver(
+ entityTypes,
+ (env) -> {
+ try {
+ Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME));
+ return UrnToEntityMapper.map(urn);
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to get entity", e);
+ }
+ });
+ }
+
+ private DataFetcher getResolver(LoadableType<?, String> loadableType) {
+ return getResolver(loadableType, this::getUrnField);
+ }
+
+ private <T, K> DataFetcher getResolver(
+ LoadableType<T, K> loadableType, Function<DataFetchingEnvironment, K> keyProvider) {
+ return new LoadableTypeResolver<>(loadableType, keyProvider);
+ }
+
+ private String getUrnField(DataFetchingEnvironment env) {
+ return env.getArgument(URN_FIELD_NAME);
+ }
+
+ private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Mutation",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType))
+ .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType))
+ .dataFetcher(
+ "createTag", new CreateTagResolver(this.entityClient, this.entityService))
+ .dataFetcher("updateTag", new MutableTypeResolver<>(tagType))
+ .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService))
+ .dataFetcher("deleteTag", new DeleteTagResolver(entityClient))
+ .dataFetcher("updateChart", new MutableTypeResolver<>(chartType))
+ .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType))
+ .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType))
+ .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType))
+ .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType))
+ .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType))
+ .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType))
+ .dataFetcher("addTag", new AddTagResolver(entityService))
+ .dataFetcher("addTags", new AddTagsResolver(entityService))
+ .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService))
+ .dataFetcher("removeTag", new RemoveTagResolver(entityService))
+ .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService))
+ .dataFetcher("addTerm", new AddTermResolver(entityService))
+ .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService))
+ .dataFetcher("addTerms", new AddTermsResolver(entityService))
+ .dataFetcher("removeTerm", new RemoveTermResolver(entityService))
+ .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService))
+ .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient))
+ .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient))
+ .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient))
+ .dataFetcher(
+ "updateDescription",
+ new UpdateDescriptionResolver(entityService, this.entityClient))
+ .dataFetcher("addOwner", new AddOwnerResolver(entityService))
+ .dataFetcher("addOwners", new AddOwnersResolver(entityService))
+ .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService))
+ .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService))
+ .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService))
+ .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient))
+ .dataFetcher("removeLink", new RemoveLinkResolver(entityService))
+ .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService))
+ .dataFetcher(
+ "removeGroupMembers", new RemoveGroupMembersResolver(this.groupService))
+ .dataFetcher("createGroup", new CreateGroupResolver(this.groupService))
+ .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient))
+ .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient))
+ .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient))
+ .dataFetcher(
+ "createDomain", new CreateDomainResolver(this.entityClient, this.entityService))
+ .dataFetcher(
+ "moveDomain", new MoveDomainResolver(this.entityService, this.entityClient))
+ .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient))
+ .dataFetcher(
+ "setDomain", new SetDomainResolver(this.entityClient, this.entityService))
+ .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService))
+ .dataFetcher(
+ "updateDeprecation",
+ new UpdateDeprecationResolver(this.entityClient, this.entityService))
+ .dataFetcher(
+ "batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService))
+ .dataFetcher(
+ "unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService))
+ .dataFetcher(
+ "createSecret", new CreateSecretResolver(this.entityClient, this.secretService))
+ .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient))
+ .dataFetcher(
+ "createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService))
+ .dataFetcher(
+ "revokeAccessToken",
+ new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService))
+ .dataFetcher(
+ "createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient))
+ .dataFetcher(
+ "updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient))
+ .dataFetcher(
+ "deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient))
+ .dataFetcher(
+ "createIngestionExecutionRequest",
+ new CreateIngestionExecutionRequestResolver(
+ this.entityClient, this.ingestionConfiguration))
+ .dataFetcher(
+ "cancelIngestionExecutionRequest",
+ new CancelIngestionExecutionRequestResolver(this.entityClient))
+ .dataFetcher(
+ "createTestConnectionRequest",
+ new CreateTestConnectionRequestResolver(
+ this.entityClient, this.ingestionConfiguration))
+ .dataFetcher(
+ "deleteAssertion",
+ new DeleteAssertionResolver(this.entityClient, this.entityService))
+ .dataFetcher("createTest", new CreateTestResolver(this.entityClient))
+ .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient))
+ .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient))
+ .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient))
+ .dataFetcher(
+ "createGlossaryTerm",
+ new CreateGlossaryTermResolver(this.entityClient, this.entityService))
+ .dataFetcher(
+ "createGlossaryNode",
+ new CreateGlossaryNodeResolver(this.entityClient, this.entityService))
+ .dataFetcher(
+ "updateParentNode",
+ new UpdateParentNodeResolver(this.entityService, this.entityClient))
+ .dataFetcher(
+ "deleteGlossaryEntity",
+ new DeleteGlossaryEntityResolver(this.entityClient, this.entityService))
+ .dataFetcher(
+ "updateName", new UpdateNameResolver(this.entityService, this.entityClient))
+ .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService))
+ .dataFetcher(
+ "removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService))
+ .dataFetcher(
+ "createNativeUserResetToken",
+ new CreateNativeUserResetTokenResolver(this.nativeUserService))
+ .dataFetcher(
+ "batchUpdateSoftDeleted",
+ new BatchUpdateSoftDeletedResolver(this.entityService))
+ .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService))
+ .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient))
+ .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService))
+ .dataFetcher(
+ "createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService))
+ .dataFetcher(
+ "acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService))
+ .dataFetcher("createPost", new CreatePostResolver(this.postService))
+ .dataFetcher("deletePost", new DeletePostResolver(this.postService))
+ .dataFetcher(
+ "batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient))
+ .dataFetcher("createView", new CreateViewResolver(this.viewService))
+ .dataFetcher("updateView", new UpdateViewResolver(this.viewService))
+ .dataFetcher("deleteView", new DeleteViewResolver(this.viewService))
+ .dataFetcher(
+ "updateGlobalViewsSettings",
+ new UpdateGlobalViewsSettingsResolver(this.settingsService))
+ .dataFetcher(
+ "updateCorpUserViewsSettings",
+ new UpdateCorpUserViewsSettingsResolver(this.settingsService))
+ .dataFetcher(
+ "updateLineage",
+ new UpdateLineageResolver(this.entityService, this.lineageService))
+ .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService))
+ .dataFetcher("createQuery", new CreateQueryResolver(this.queryService))
+ .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService))
+ .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService))
+ .dataFetcher(
+ "createDataProduct", new CreateDataProductResolver(this.dataProductService))
+ .dataFetcher(
+ "updateDataProduct", new UpdateDataProductResolver(this.dataProductService))
+ .dataFetcher(
+ "deleteDataProduct", new DeleteDataProductResolver(this.dataProductService))
+ .dataFetcher(
+ "batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService))
+ .dataFetcher(
+ "createOwnershipType",
+ new CreateOwnershipTypeResolver(this.ownershipTypeService))
+ .dataFetcher(
+ "updateOwnershipType",
+ new UpdateOwnershipTypeResolver(this.ownershipTypeService))
+ .dataFetcher(
+ "deleteOwnershipType",
+ new DeleteOwnershipTypeResolver(this.ownershipTypeService)));
+ }
+
+ private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "SearchResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((SearchResult) env.getSource()).getEntity())))
+ .type(
+ "MatchedField",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((MatchedField) env.getSource()).getEntity())))
+ .type(
+ "SearchAcrossLineageResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity())))
+ .type(
+ "AggregationMetadata",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((AggregationMetadata) env.getSource()).getEntity())))
+ .type(
+ "RecommendationContent",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> ((RecommendationContent) env.getSource()).getEntity())))
+ .type(
+ "BrowseResults",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entities",
+ new EntityTypeBatchResolver(
+ entityTypes, (env) -> ((BrowseResults) env.getSource()).getEntities())))
+ .type(
+ "ParentDomainsResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "domains",
+ new EntityTypeBatchResolver(
+ entityTypes,
+ (env) -> {
+ final ParentDomainsResult result = env.getSource();
+ return result != null ? result.getDomains() : null;
+ })))
+ .type(
+ "EntityRelationshipLegacy",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity())))
+ .type(
+ "EntityRelationship",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((EntityRelationship) env.getSource()).getEntity())))
+ .type(
+ "BrowseResultGroupV2",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity())))
+ .type(
+ "BrowsePathEntry",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((BrowsePathEntry) env.getSource()).getEntity())))
+ .type(
+ "LineageRelationship",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> ((LineageRelationship) env.getSource()).getEntity()))
+ .dataFetcher(
+ "createdActor",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> {
+ final LineageRelationship relationship = env.getSource();
+ return relationship.getCreatedActor() != null
+ ? relationship.getCreatedActor()
+ : null;
+ }))
+ .dataFetcher(
+ "updatedActor",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> {
+ final LineageRelationship relationship = env.getSource();
+ return relationship.getUpdatedActor() != null
+ ? relationship.getUpdatedActor()
+ : null;
+ })))
+ .type(
+ "ListDomainsResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "domains",
+ new LoadableTypeBatchResolver<>(
+ domainType,
+ (env) ->
+ ((ListDomainsResult) env.getSource())
+ .getDomains().stream()
+ .map(Domain::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "GetRootGlossaryTermsResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "terms",
+ new LoadableTypeBatchResolver<>(
+ glossaryTermType,
+ (env) ->
+ ((GetRootGlossaryTermsResult) env.getSource())
+ .getTerms().stream()
+ .map(GlossaryTerm::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "GetRootGlossaryNodesResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "nodes",
+ new LoadableTypeBatchResolver<>(
+ glossaryNodeType,
+ (env) ->
+ ((GetRootGlossaryNodesResult) env.getSource())
+ .getNodes().stream()
+ .map(GlossaryNode::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "AutoCompleteResults",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entities",
+ new EntityTypeBatchResolver(
+ entityTypes,
+ (env) -> ((AutoCompleteResults) env.getSource()).getEntities())))
+ .type(
+ "AutoCompleteResultForEntity",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entities",
+ new EntityTypeBatchResolver(
+ entityTypes,
+ (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities())))
+ .type(
+ "PolicyMatchCriterionValue",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes,
+ (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity())))
+ .type(
+ "ListTestsResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "tests",
+ new LoadableTypeBatchResolver<>(
+ testType,
+ (env) ->
+ ((ListTestsResult) env.getSource())
+ .getTests().stream()
+ .map(Test::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "QuickFilter",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "entity",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((QuickFilter) env.getSource()).getEntity())))
+ .type(
+ "Owner",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "ownershipType",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((Owner) env.getSource()).getOwnershipType())));
+ }
- private DataFetcher getEntityResolver() {
- return new EntityTypeResolver(entityTypes,
- (env) -> {
- try {
- Urn urn = Urn.createFromString(env.getArgument(URN_FIELD_NAME));
- return UrnToEntityMapper.map(urn);
- } catch (Exception e) {
- throw new RuntimeException("Failed to get entity", e);
- }
- });
- }
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.Dataset} type.
+ */
+ private void configureDatasetResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "Dataset",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((Dataset) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "container",
+ new LoadableTypeResolver<>(
+ containerType,
+ (env) -> {
+ final Dataset dataset = env.getSource();
+ return dataset.getContainer() != null
+ ? dataset.getContainer().getUrn()
+ : null;
+ }))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final Dataset dataset = env.getSource();
+ return dataset.getDataPlatformInstance() != null
+ ? dataset.getDataPlatformInstance().getUrn()
+ : null;
+ }))
+ .dataFetcher(
+ "datasetProfiles",
+ new TimeSeriesAspectResolver(
+ this.entityClient,
+ "dataset",
+ "datasetProfile",
+ DatasetProfileMapper::map))
+ .dataFetcher(
+ "operations",
+ new TimeSeriesAspectResolver(
+ this.entityClient,
+ "dataset",
+ "operation",
+ OperationMapper::map,
+ new SortCriterion()
+ .setField(OPERATION_EVENT_TIME_FIELD_NAME)
+ .setOrder(SortOrder.DESCENDING)))
+ .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient))
+ .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient))
+ .dataFetcher(
+ "health", new DatasetHealthResolver(graphClient, timeseriesAspectService))
+ .dataFetcher("schemaMetadata", new AspectResolver())
+ .dataFetcher(
+ "assertions", new EntityAssertionsResolver(entityClient, graphClient))
+ .dataFetcher("testResults", new TestResultsResolver(entityClient))
+ .dataFetcher(
+ "aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher("runs", new EntityRunsResolver(entityClient))
+ .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
+ .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)))
+ .type(
+ "Owner",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "owner",
+ new OwnerTypeResolver<>(
+ ownerTypes, (env) -> ((Owner) env.getSource()).getOwner())))
+ .type(
+ "UserUsageCounts",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "user",
+ new LoadableTypeResolver<>(
+ corpUserType,
+ (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn())))
+ .type(
+ "ForeignKeyConstraint",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "foreignDataset",
+ new LoadableTypeResolver<>(
+ datasetType,
+ (env) ->
+ ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn())))
+ .type(
+ "SiblingProperties",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "siblings",
+ new EntityTypeBatchResolver(
+ new ArrayList<>(entityTypes),
+ (env) -> ((SiblingProperties) env.getSource()).getSiblings())))
+ .type(
+ "InstitutionalMemoryMetadata",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "author",
+ new LoadableTypeResolver<>(
+ corpUserType,
+ (env) ->
+ ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn())))
+ .type(
+ "DatasetStatsSummary",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "topUsersLast30Days",
+ new LoadableTypeBatchResolver<>(
+ corpUserType,
+ (env) -> {
+ DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource());
+ return summary.getTopUsersLast30Days() != null
+ ? summary.getTopUsersLast30Days().stream()
+ .map(CorpUser::getUrn)
+ .collect(Collectors.toList())
+ : null;
+ })));
+ }
- private DataFetcher getResolver(LoadableType<?, String> loadableType) {
- return getResolver(loadableType, this::getUrnField);
- }
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.VersionedDataset} type.
+ */
+ private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "VersionedDataset",
+ typeWiring -> typeWiring.dataFetcher("relationships", new StaticDataFetcher(null)));
+ }
- private <T, K> DataFetcher getResolver(LoadableType<T, K> loadableType,
- Function<DataFetchingEnvironment, K> keyProvider) {
- return new LoadableTypeResolver<>(loadableType, keyProvider);
- }
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type.
+ */
+ private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "AccessToken",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "metadata",
+ new LoadableTypeResolver<>(
+ accessTokenMetadataType,
+ (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn())));
+ builder.type(
+ "ListAccessTokenResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "tokens",
+ new LoadableTypeBatchResolver<>(
+ accessTokenMetadataType,
+ (env) ->
+ ((ListAccessTokenResult) env.getSource())
+ .getTokens().stream()
+ .map(AccessTokenMetadata::getUrn)
+ .collect(Collectors.toList()))));
+ }
+
+ private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "GlossaryTerm",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("schemaMetadata", new AspectResolver())
+ .dataFetcher("parentNodes", new ParentNodesResolver(entityClient))
+ .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
+ .dataFetcher("exists", new EntityExistsResolver(entityService)));
+ }
+
+ private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "GlossaryNode",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("parentNodes", new ParentNodesResolver(entityClient))
+ .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
+ .dataFetcher("exists", new EntityExistsResolver(entityService)));
+ }
+
+ private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "SchemaFieldEntity",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "parent",
+ new EntityTypeResolver(
+ entityTypes, (env) -> ((SchemaFieldEntity) env.getSource()).getParent())));
+ }
+
+ private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "EntityPath",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "path",
+ new BatchGetEntitiesResolver(
+ entityTypes, (env) -> ((EntityPath) env.getSource()).getPath())));
+ }
- private String getUrnField(DataFetchingEnvironment env) {
- return env.getArgument(URN_FIELD_NAME);
- }
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.CorpUser} type.
+ */
+ private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "CorpUser",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)));
+ builder.type(
+ "CorpUserInfo",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "manager",
+ new LoadableTypeResolver<>(
+ corpUserType,
+ (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn())));
+ }
- private void configureMutationResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Mutation", typeWiring -> typeWiring
- .dataFetcher("updateDataset", new MutableTypeResolver<>(datasetType))
- .dataFetcher("updateDatasets", new MutableTypeBatchResolver<>(datasetType))
- .dataFetcher("createTag", new CreateTagResolver(this.entityClient, this.entityService))
- .dataFetcher("updateTag", new MutableTypeResolver<>(tagType))
- .dataFetcher("setTagColor", new SetTagColorResolver(entityClient, entityService))
- .dataFetcher("deleteTag", new DeleteTagResolver(entityClient))
- .dataFetcher("updateChart", new MutableTypeResolver<>(chartType))
- .dataFetcher("updateDashboard", new MutableTypeResolver<>(dashboardType))
- .dataFetcher("updateNotebook", new MutableTypeResolver<>(notebookType))
- .dataFetcher("updateDataJob", new MutableTypeResolver<>(dataJobType))
- .dataFetcher("updateDataFlow", new MutableTypeResolver<>(dataFlowType))
- .dataFetcher("updateCorpUserProperties", new MutableTypeResolver<>(corpUserType))
- .dataFetcher("updateCorpGroupProperties", new MutableTypeResolver<>(corpGroupType))
- .dataFetcher("addTag", new AddTagResolver(entityService))
- .dataFetcher("addTags", new AddTagsResolver(entityService))
- .dataFetcher("batchAddTags", new BatchAddTagsResolver(entityService))
- .dataFetcher("removeTag", new RemoveTagResolver(entityService))
- .dataFetcher("batchRemoveTags", new BatchRemoveTagsResolver(entityService))
- .dataFetcher("addTerm", new AddTermResolver(entityService))
- .dataFetcher("batchAddTerms", new BatchAddTermsResolver(entityService))
- .dataFetcher("addTerms", new AddTermsResolver(entityService))
- .dataFetcher("removeTerm", new RemoveTermResolver(entityService))
- .dataFetcher("batchRemoveTerms", new BatchRemoveTermsResolver(entityService))
- .dataFetcher("createPolicy", new UpsertPolicyResolver(this.entityClient))
- .dataFetcher("updatePolicy", new UpsertPolicyResolver(this.entityClient))
- .dataFetcher("deletePolicy", new DeletePolicyResolver(this.entityClient))
- .dataFetcher("updateDescription", new UpdateDescriptionResolver(entityService, this.entityClient))
- .dataFetcher("addOwner", new AddOwnerResolver(entityService))
- .dataFetcher("addOwners", new AddOwnersResolver(entityService))
- .dataFetcher("batchAddOwners", new BatchAddOwnersResolver(entityService))
- .dataFetcher("removeOwner", new RemoveOwnerResolver(entityService))
- .dataFetcher("batchRemoveOwners", new BatchRemoveOwnersResolver(entityService))
- .dataFetcher("addLink", new AddLinkResolver(entityService, this.entityClient))
- .dataFetcher("removeLink", new RemoveLinkResolver(entityService))
- .dataFetcher("addGroupMembers", new AddGroupMembersResolver(this.groupService))
- .dataFetcher("removeGroupMembers", new RemoveGroupMembersResolver(this.groupService))
- .dataFetcher("createGroup", new CreateGroupResolver(this.groupService))
- .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient))
- .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient))
- .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient))
- .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService))
- .dataFetcher("moveDomain", new MoveDomainResolver(this.entityService, this.entityClient))
- .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient))
- .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService))
- .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService))
- .dataFetcher("updateDeprecation", new UpdateDeprecationResolver(this.entityClient, this.entityService))
- .dataFetcher("batchUpdateDeprecation", new BatchUpdateDeprecationResolver(entityService))
- .dataFetcher("unsetDomain", new UnsetDomainResolver(this.entityClient, this.entityService))
- .dataFetcher("createSecret", new CreateSecretResolver(this.entityClient, this.secretService))
- .dataFetcher("deleteSecret", new DeleteSecretResolver(this.entityClient))
- .dataFetcher("createAccessToken", new CreateAccessTokenResolver(this.statefulTokenService))
- .dataFetcher("revokeAccessToken", new RevokeAccessTokenResolver(this.entityClient, this.statefulTokenService))
- .dataFetcher("createIngestionSource", new UpsertIngestionSourceResolver(this.entityClient))
- .dataFetcher("updateIngestionSource", new UpsertIngestionSourceResolver(this.entityClient))
- .dataFetcher("deleteIngestionSource", new DeleteIngestionSourceResolver(this.entityClient))
- .dataFetcher("createIngestionExecutionRequest", new CreateIngestionExecutionRequestResolver(this.entityClient, this.ingestionConfiguration))
- .dataFetcher("cancelIngestionExecutionRequest", new CancelIngestionExecutionRequestResolver(this.entityClient))
- .dataFetcher("createTestConnectionRequest", new CreateTestConnectionRequestResolver(this.entityClient, this.ingestionConfiguration))
- .dataFetcher("deleteAssertion", new DeleteAssertionResolver(this.entityClient, this.entityService))
- .dataFetcher("createTest", new CreateTestResolver(this.entityClient))
- .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient))
- .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient))
- .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient))
- .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient, this.entityService))
- .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService))
- .dataFetcher("updateParentNode", new UpdateParentNodeResolver(this.entityService, this.entityClient))
- .dataFetcher("deleteGlossaryEntity",
- new DeleteGlossaryEntityResolver(this.entityClient, this.entityService))
- .dataFetcher("updateName", new UpdateNameResolver(this.entityService, this.entityClient))
- .dataFetcher("addRelatedTerms", new AddRelatedTermsResolver(this.entityService))
- .dataFetcher("removeRelatedTerms", new RemoveRelatedTermsResolver(this.entityService))
- .dataFetcher("createNativeUserResetToken", new CreateNativeUserResetTokenResolver(this.nativeUserService))
- .dataFetcher("batchUpdateSoftDeleted", new BatchUpdateSoftDeletedResolver(this.entityService))
- .dataFetcher("updateUserSetting", new UpdateUserSettingResolver(this.entityService))
- .dataFetcher("rollbackIngestion", new RollbackIngestionResolver(this.entityClient))
- .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService))
- .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService))
- .dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService))
- .dataFetcher("createPost", new CreatePostResolver(this.postService))
- .dataFetcher("deletePost", new DeletePostResolver(this.postService))
- .dataFetcher("batchUpdateStepStates", new BatchUpdateStepStatesResolver(this.entityClient))
- .dataFetcher("createView", new CreateViewResolver(this.viewService))
- .dataFetcher("updateView", new UpdateViewResolver(this.viewService))
- .dataFetcher("deleteView", new DeleteViewResolver(this.viewService))
- .dataFetcher("updateGlobalViewsSettings", new UpdateGlobalViewsSettingsResolver(this.settingsService))
- .dataFetcher("updateCorpUserViewsSettings", new UpdateCorpUserViewsSettingsResolver(this.settingsService))
- .dataFetcher("updateLineage", new UpdateLineageResolver(this.entityService, this.lineageService))
- .dataFetcher("updateEmbed", new UpdateEmbedResolver(this.entityService))
- .dataFetcher("createQuery", new CreateQueryResolver(this.queryService))
- .dataFetcher("updateQuery", new UpdateQueryResolver(this.queryService))
- .dataFetcher("deleteQuery", new DeleteQueryResolver(this.queryService))
- .dataFetcher("createDataProduct", new CreateDataProductResolver(this.dataProductService))
- .dataFetcher("updateDataProduct", new UpdateDataProductResolver(this.dataProductService))
- .dataFetcher("deleteDataProduct", new DeleteDataProductResolver(this.dataProductService))
- .dataFetcher("batchSetDataProduct", new BatchSetDataProductResolver(this.dataProductService))
- .dataFetcher("createOwnershipType", new CreateOwnershipTypeResolver(this.ownershipTypeService))
- .dataFetcher("updateOwnershipType", new UpdateOwnershipTypeResolver(this.ownershipTypeService))
- .dataFetcher("deleteOwnershipType", new DeleteOwnershipTypeResolver(this.ownershipTypeService))
- );
- }
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.CorpGroup} type.
+ */
+ private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "CorpGroup",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("exists", new EntityExistsResolver(entityService)));
+ builder
+ .type(
+ "CorpGroupInfo",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "admins",
+ new LoadableTypeBatchResolver<>(
+ corpUserType,
+ (env) ->
+ ((CorpGroupInfo) env.getSource())
+ .getAdmins().stream()
+ .map(CorpUser::getUrn)
+ .collect(Collectors.toList())))
+ .dataFetcher(
+ "members",
+ new LoadableTypeBatchResolver<>(
+ corpUserType,
+ (env) ->
+ ((CorpGroupInfo) env.getSource())
+ .getMembers().stream()
+ .map(CorpUser::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "ListGroupsResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "groups",
+ new LoadableTypeBatchResolver<>(
+ corpGroupType,
+ (env) ->
+ ((ListGroupsResult) env.getSource())
+ .getGroups().stream()
+ .map(CorpGroup::getUrn)
+ .collect(Collectors.toList()))));
+ }
+
+ private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Tag",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)));
+ builder.type(
+ "TagAssociation",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "tag",
+ new LoadableTypeResolver<>(
+ tagType,
+ (env) ->
+ ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource())
+ .getTag()
+ .getUrn())));
+ }
+
+ private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "GlossaryTermAssociation",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "term",
+ new LoadableTypeResolver<>(
+ glossaryTermType,
+ (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn())));
+ }
- private void configureGenericEntityResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("SearchResult", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((SearchResult) env.getSource()).getEntity()))
- )
- .type("MatchedField", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((MatchedField) env.getSource()).getEntity()))
- )
- .type("SearchAcrossLineageResult", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((SearchAcrossLineageResult) env.getSource()).getEntity()))
- )
- .type("AggregationMetadata", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((AggregationMetadata) env.getSource()).getEntity()))
- )
- .type("RecommendationContent", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((RecommendationContent) env.getSource()).getEntity()))
- )
- .type("BrowseResults", typeWiring -> typeWiring
- .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes,
- (env) -> ((BrowseResults) env.getSource()).getEntities()))
- )
- .type("ParentDomainsResult", typeWiring -> typeWiring
- .dataFetcher("domains", new EntityTypeBatchResolver(entityTypes,
- (env) -> {
- final ParentDomainsResult result = env.getSource();
- return result != null ? result.getDomains() : null;
- }))
- )
- .type("EntityRelationshipLegacy", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((EntityRelationshipLegacy) env.getSource()).getEntity()))
- )
- .type("EntityRelationship", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((EntityRelationship) env.getSource()).getEntity()))
- )
- .type("BrowseResultGroupV2", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((BrowseResultGroupV2) env.getSource()).getEntity()))
- )
- .type("BrowsePathEntry", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((BrowsePathEntry) env.getSource()).getEntity()))
- )
- .type("LineageRelationship", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((LineageRelationship) env.getSource()).getEntity()))
- .dataFetcher("createdActor",
- new EntityTypeResolver(entityTypes,
- (env) -> {
- final LineageRelationship relationship = env.getSource();
- return relationship.getCreatedActor() != null ? relationship.getCreatedActor() : null;
- })
- )
- .dataFetcher("updatedActor",
- new EntityTypeResolver(entityTypes,
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.Notebook} type.
+ */
+ private void configureNotebookResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Notebook",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((Notebook) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
(env) -> {
- final LineageRelationship relationship = env.getSource();
- return relationship.getUpdatedActor() != null ? relationship.getUpdatedActor() : null;
- })
- )
- )
- .type("ListDomainsResult", typeWiring -> typeWiring
- .dataFetcher("domains", new LoadableTypeBatchResolver<>(domainType,
- (env) -> ((ListDomainsResult) env.getSource()).getDomains().stream()
- .map(Domain::getUrn)
- .collect(Collectors.toList())))
- )
- .type("GetRootGlossaryTermsResult", typeWiring -> typeWiring
- .dataFetcher("terms", new LoadableTypeBatchResolver<>(glossaryTermType,
- (env) -> ((GetRootGlossaryTermsResult) env.getSource()).getTerms().stream()
- .map(GlossaryTerm::getUrn)
- .collect(Collectors.toList())))
- )
- .type("GetRootGlossaryNodesResult", typeWiring -> typeWiring
- .dataFetcher("nodes", new LoadableTypeBatchResolver<>(glossaryNodeType,
- (env) -> ((GetRootGlossaryNodesResult) env.getSource()).getNodes().stream()
- .map(GlossaryNode::getUrn)
- .collect(Collectors.toList())))
- )
- .type("AutoCompleteResults", typeWiring -> typeWiring
- .dataFetcher("entities",
- new EntityTypeBatchResolver(entityTypes,
- (env) -> ((AutoCompleteResults) env.getSource()).getEntities()))
- )
- .type("AutoCompleteResultForEntity", typeWiring -> typeWiring
- .dataFetcher("entities", new EntityTypeBatchResolver(entityTypes,
- (env) -> ((AutoCompleteResultForEntity) env.getSource()).getEntities()))
- )
- .type("PolicyMatchCriterionValue", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((PolicyMatchCriterionValue) env.getSource()).getEntity()))
- )
- .type("ListTestsResult", typeWiring -> typeWiring
- .dataFetcher("tests", new LoadableTypeBatchResolver<>(testType,
- (env) -> ((ListTestsResult) env.getSource()).getTests().stream()
- .map(Test::getUrn)
- .collect(Collectors.toList())))
- )
- .type("QuickFilter", typeWiring -> typeWiring
- .dataFetcher("entity", new EntityTypeResolver(entityTypes,
- (env) -> ((QuickFilter) env.getSource()).getEntity()))
- )
- .type("Owner", typeWiring -> typeWiring
- .dataFetcher("ownershipType", new EntityTypeResolver(entityTypes,
- (env) -> ((Owner) env.getSource()).getOwnershipType()))
- );
- }
+ final Notebook notebook = env.getSource();
+ return notebook.getDataPlatformInstance() != null
+ ? notebook.getDataPlatformInstance().getUrn()
+ : null;
+ })));
+ }
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dataset} type.
- */
- private void configureDatasetResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("Dataset", typeWiring -> typeWiring
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.Dashboard} type.
+ */
+ private void configureDashboardResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Dashboard",
+ typeWiring ->
+ typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.datasetType))
+ .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType))
.dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((Dataset) env.getSource()).getPlatform().getUrn())
- )
- .dataFetcher("container",
- new LoadableTypeResolver<>(containerType,
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
(env) -> {
- final Dataset dataset = env.getSource();
- return dataset.getContainer() != null ? dataset.getContainer().getUrn() : null;
- })
- )
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ final Dashboard dashboard = env.getSource();
+ return dashboard.getDataPlatformInstance() != null
+ ? dashboard.getDataPlatformInstance().getUrn()
+ : null;
+ }))
+ .dataFetcher(
+ "container",
+ new LoadableTypeResolver<>(
+ containerType,
(env) -> {
- final Dataset dataset = env.getSource();
- return dataset.getDataPlatformInstance() != null ? dataset.getDataPlatformInstance().getUrn() : null;
- })
- )
- .dataFetcher("datasetProfiles", new TimeSeriesAspectResolver(
- this.entityClient,
- "dataset",
- "datasetProfile",
- DatasetProfileMapper::map
- )
- )
- .dataFetcher("operations", new TimeSeriesAspectResolver(
- this.entityClient,
- "dataset",
- "operation",
- OperationMapper::map,
- new SortCriterion().setField(OPERATION_EVENT_TIME_FIELD_NAME).setOrder(SortOrder.DESCENDING)
- )
- )
- .dataFetcher("usageStats", new DatasetUsageStatsResolver(this.usageClient))
- .dataFetcher("statsSummary", new DatasetStatsSummaryResolver(this.usageClient))
- .dataFetcher("health", new DatasetHealthResolver(graphClient, timeseriesAspectService))
- .dataFetcher("schemaMetadata", new AspectResolver())
- .dataFetcher("assertions", new EntityAssertionsResolver(entityClient, graphClient))
- .dataFetcher("testResults", new TestResultsResolver(entityClient))
- .dataFetcher("aspects", new WeaklyTypedAspectsResolver(entityClient, entityRegistry))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("runs", new EntityRunsResolver(entityClient))
+ final Dashboard dashboard = env.getSource();
+ return dashboard.getContainer() != null
+ ? dashboard.getContainer().getUrn()
+ : null;
+ }))
+ .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
+ .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService))
+ .dataFetcher(
+ "statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService))
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
- .dataFetcher("parentContainers", new ParentContainersResolver(entityClient)))
- .type("Owner", typeWiring -> typeWiring
- .dataFetcher("owner", new OwnerTypeResolver<>(ownerTypes,
- (env) -> ((Owner) env.getSource()).getOwner()))
- )
- .type("UserUsageCounts", typeWiring -> typeWiring
- .dataFetcher("user", new LoadableTypeResolver<>(corpUserType,
- (env) -> ((UserUsageCounts) env.getSource()).getUser().getUrn()))
- )
- .type("ForeignKeyConstraint", typeWiring -> typeWiring
- .dataFetcher("foreignDataset", new LoadableTypeResolver<>(datasetType,
- (env) -> ((ForeignKeyConstraint) env.getSource()).getForeignDataset().getUrn()))
- )
- .type("SiblingProperties", typeWiring -> typeWiring
- .dataFetcher("siblings",
- new EntityTypeBatchResolver(
- new ArrayList<>(entityTypes),
- (env) -> ((SiblingProperties) env.getSource()).getSiblings()))
- )
- .type("InstitutionalMemoryMetadata", typeWiring -> typeWiring
- .dataFetcher("author", new LoadableTypeResolver<>(corpUserType,
- (env) -> ((InstitutionalMemoryMetadata) env.getSource()).getAuthor().getUrn()))
- )
- .type("DatasetStatsSummary", typeWiring -> typeWiring
- .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType,
+ .dataFetcher("exists", new EntityExistsResolver(entityService)));
+ builder.type(
+ "DashboardInfo",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "charts",
+ new LoadableTypeBatchResolver<>(
+ chartType,
+ (env) ->
+ ((DashboardInfo) env.getSource())
+ .getCharts().stream()
+ .map(Chart::getUrn)
+ .collect(Collectors.toList()))));
+ builder.type(
+ "DashboardUserUsageCounts",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "user",
+ new LoadableTypeResolver<>(
+ corpUserType,
+ (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn())));
+ builder.type(
+ "DashboardStatsSummary",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "topUsersLast30Days",
+ new LoadableTypeBatchResolver<>(
+ corpUserType,
(env) -> {
- DatasetStatsSummary summary = ((DatasetStatsSummary) env.getSource());
- return summary.getTopUsersLast30Days() != null
- ? summary.getTopUsersLast30Days().stream()
- .map(CorpUser::getUrn)
- .collect(Collectors.toList())
- : null;
- }))
- );
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.VersionedDataset} type.
- */
- private void configureVersionedDatasetResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("VersionedDataset", typeWiring -> typeWiring
- .dataFetcher("relationships", new StaticDataFetcher(null)));
-
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.AccessTokenMetadata} type.
- */
- private void configureAccessAccessTokenMetadataResolvers(final RuntimeWiring.Builder builder) {
- builder.type("AccessToken", typeWiring -> typeWiring
- .dataFetcher("metadata", new LoadableTypeResolver<>(accessTokenMetadataType,
- (env) -> ((AccessToken) env.getSource()).getMetadata().getUrn()))
- );
- builder.type("ListAccessTokenResult", typeWiring -> typeWiring
- .dataFetcher("tokens", new LoadableTypeBatchResolver<>(accessTokenMetadataType,
- (env) -> ((ListAccessTokenResult) env.getSource()).getTokens().stream()
- .map(AccessTokenMetadata::getUrn)
- .collect(Collectors.toList())))
- );
- }
-
- private void configureGlossaryTermResolvers(final RuntimeWiring.Builder builder) {
- builder.type("GlossaryTerm", typeWiring -> typeWiring
- .dataFetcher("schemaMetadata", new AspectResolver())
- .dataFetcher("parentNodes", new ParentNodesResolver(entityClient))
- .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- );
- }
-
- private void configureGlossaryNodeResolvers(final RuntimeWiring.Builder builder) {
- builder.type("GlossaryNode", typeWiring -> typeWiring
- .dataFetcher("parentNodes", new ParentNodesResolver(entityClient))
- .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- );
- }
-
- private void configureSchemaFieldResolvers(final RuntimeWiring.Builder builder) {
- builder.type("SchemaFieldEntity", typeWiring -> typeWiring
- .dataFetcher("parent", new EntityTypeResolver(entityTypes,
- (env) -> ((SchemaFieldEntity) env.getSource()).getParent()))
- );
- }
-
- private void configureEntityPathResolvers(final RuntimeWiring.Builder builder) {
- builder.type("EntityPath", typeWiring -> typeWiring
- .dataFetcher("path", new BatchGetEntitiesResolver(entityTypes,
- (env) -> ((EntityPath) env.getSource()).getPath()))
- );
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpUser} type.
- */
- private void configureCorpUserResolvers(final RuntimeWiring.Builder builder) {
- builder.type("CorpUser", typeWiring -> typeWiring
- .dataFetcher("relationships",
- new EntityRelationshipsResultResolver(graphClient))
- );
- builder.type("CorpUserInfo", typeWiring -> typeWiring
- .dataFetcher("manager", new LoadableTypeResolver<>(corpUserType,
- (env) -> ((CorpUserInfo) env.getSource()).getManager().getUrn()))
- );
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.CorpGroup} type.
- */
- private void configureCorpGroupResolvers(final RuntimeWiring.Builder builder) {
- builder.type("CorpGroup", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("exists", new EntityExistsResolver(entityService)));
- builder.type("CorpGroupInfo", typeWiring -> typeWiring
- .dataFetcher("admins",
- new LoadableTypeBatchResolver<>(corpUserType,
- (env) -> ((CorpGroupInfo) env.getSource()).getAdmins().stream()
- .map(CorpUser::getUrn)
- .collect(Collectors.toList())))
- .dataFetcher("members",
- new LoadableTypeBatchResolver<>(corpUserType,
- (env) -> ((CorpGroupInfo) env.getSource()).getMembers().stream()
- .map(CorpUser::getUrn)
- .collect(Collectors.toList())))
- )
- .type("ListGroupsResult", typeWiring -> typeWiring
- .dataFetcher("groups", new LoadableTypeBatchResolver<>(corpGroupType,
- (env) -> ((ListGroupsResult) env.getSource()).getGroups().stream()
- .map(CorpGroup::getUrn)
- .collect(Collectors.toList())))
- );
- }
-
- private void configureTagAssociationResolver(final RuntimeWiring.Builder builder) {
- builder.type("Tag", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)));
- builder.type("TagAssociation", typeWiring -> typeWiring
- .dataFetcher("tag",
- new LoadableTypeResolver<>(tagType,
- (env) -> ((com.linkedin.datahub.graphql.generated.TagAssociation) env.getSource()).getTag().getUrn()))
- );
- }
-
- private void configureGlossaryTermAssociationResolver(final RuntimeWiring.Builder builder) {
- builder.type("GlossaryTermAssociation", typeWiring -> typeWiring
- .dataFetcher("term",
- new LoadableTypeResolver<>(glossaryTermType,
- (env) -> ((GlossaryTermAssociation) env.getSource()).getTerm().getUrn()))
- );
- }
+ DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource());
+ return summary.getTopUsersLast30Days() != null
+ ? summary.getTopUsersLast30Days().stream()
+ .map(CorpUser::getUrn)
+ .collect(Collectors.toList())
+ : null;
+ })));
+ }
/**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Notebook} type.
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.Chart} type.
*/
- private void configureNotebookResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Notebook", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.notebookType))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((Notebook) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
- (env) -> {
- final Notebook notebook = env.getSource();
- return notebook.getDataPlatformInstance() != null ? notebook.getDataPlatformInstance().getUrn() : null;
- })
- )
- );
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Dashboard} type.
- */
- private void configureDashboardResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Dashboard", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dashboardType))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((Dashboard) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
- (env) -> {
- final Dashboard dashboard = env.getSource();
- return dashboard.getDataPlatformInstance() != null ? dashboard.getDataPlatformInstance().getUrn() : null;
- })
- )
- .dataFetcher("container", new LoadableTypeResolver<>(containerType,
- (env) -> {
- final Dashboard dashboard = env.getSource();
- return dashboard.getContainer() != null ? dashboard.getContainer().getUrn() : null;
- })
- )
- .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
- .dataFetcher("usageStats", new DashboardUsageStatsResolver(timeseriesAspectService))
- .dataFetcher("statsSummary", new DashboardStatsSummaryResolver(timeseriesAspectService))
- .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- );
- builder.type("DashboardInfo", typeWiring -> typeWiring
- .dataFetcher("charts", new LoadableTypeBatchResolver<>(chartType,
- (env) -> ((DashboardInfo) env.getSource()).getCharts().stream()
- .map(Chart::getUrn)
- .collect(Collectors.toList())))
- );
- builder.type("DashboardUserUsageCounts", typeWiring -> typeWiring
- .dataFetcher("user", new LoadableTypeResolver<>(
- corpUserType,
- (env) -> ((DashboardUserUsageCounts) env.getSource()).getUser().getUrn()))
- );
- builder.type("DashboardStatsSummary", typeWiring -> typeWiring
- .dataFetcher("topUsersLast30Days", new LoadableTypeBatchResolver<>(corpUserType,
- (env) -> {
- DashboardStatsSummary summary = ((DashboardStatsSummary) env.getSource());
- return summary.getTopUsersLast30Days() != null
- ? summary.getTopUsersLast30Days().stream()
- .map(CorpUser::getUrn)
- .collect(Collectors.toList())
- : null;
- }))
- );
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.Chart} type.
- */
- private void configureChartResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Chart", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((Chart) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
- (env) -> {
- final Chart chart = env.getSource();
- return chart.getDataPlatformInstance() != null ? chart.getDataPlatformInstance().getUrn() : null;
- })
- )
- .dataFetcher("container", new LoadableTypeResolver<>(
- containerType,
- (env) -> {
- final Chart chart = env.getSource();
- return chart.getContainer() != null ? chart.getContainer().getUrn() : null;
- })
- )
- .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
- .dataFetcher("statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService))
- .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- );
- builder.type("ChartInfo", typeWiring -> typeWiring
- .dataFetcher("inputs", new LoadableTypeBatchResolver<>(datasetType,
- (env) -> ((ChartInfo) env.getSource()).getInputs().stream()
- .map(datasetType.getKeyProvider())
- .collect(Collectors.toList())))
- );
- }
-
- /**
- * Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types.
- */
- private void configureTypeResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("Entity", typeWiring -> typeWiring
- .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream()
- .filter(graphType -> graphType instanceof EntityType)
- .map(graphType -> (EntityType<?, ?>) graphType)
- .collect(Collectors.toList())
- )))
- .type("EntityWithRelationships", typeWiring -> typeWiring
- .typeResolver(new EntityInterfaceTypeResolver(loadableTypes.stream()
- .filter(graphType -> graphType instanceof EntityType)
- .map(graphType -> (EntityType<?, ?>) graphType)
- .collect(Collectors.toList())
- )))
- .type("BrowsableEntity", typeWiring -> typeWiring
- .typeResolver(new EntityInterfaceTypeResolver(browsableTypes.stream()
- .map(graphType -> (EntityType<?, ?>) graphType)
- .collect(Collectors.toList())
- )))
- .type("OwnerType", typeWiring -> typeWiring
- .typeResolver(new EntityInterfaceTypeResolver(ownerTypes.stream()
- .filter(graphType -> graphType instanceof EntityType)
- .map(graphType -> (EntityType<?, ?>) graphType)
- .collect(Collectors.toList())
- )))
- .type("PlatformSchema", typeWiring -> typeWiring
- .typeResolver(new PlatformSchemaUnionTypeResolver())
- )
- .type("HyperParameterValueType", typeWiring -> typeWiring
- .typeResolver(new HyperParameterValueTypeResolver())
- )
- .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver()))
- .type("TimeSeriesAspect", typeWiring -> typeWiring
- .typeResolver(new TimeSeriesAspectInterfaceTypeResolver()))
- .type("ResultsType", typeWiring -> typeWiring
- .typeResolver(new ResultsTypeResolver()));
- }
-
- /**
- * Configures custom type extensions leveraged within our GraphQL schema.
- */
- private void configureTypeExtensions(final RuntimeWiring.Builder builder) {
- builder.scalar(GraphQLLong);
- }
-
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataJob} type.
- */
- private void configureDataJobResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("DataJob", typeWiring -> typeWiring
+ private void configureChartResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Chart",
+ typeWiring ->
+ typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType))
+ .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.chartType))
.dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("dataFlow", new LoadableTypeResolver<>(dataFlowType,
- (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn()))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((Chart) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final Chart chart = env.getSource();
+ return chart.getDataPlatformInstance() != null
+ ? chart.getDataPlatformInstance().getUrn()
+ : null;
+ }))
+ .dataFetcher(
+ "container",
+ new LoadableTypeResolver<>(
+ containerType,
(env) -> {
- final DataJob dataJob = env.getSource();
- return dataJob.getDataPlatformInstance() != null ? dataJob.getDataPlatformInstance().getUrn() : null;
- })
- )
- .dataFetcher("runs", new DataJobRunsResolver(entityClient))
+ final Chart chart = env.getSource();
+ return chart.getContainer() != null
+ ? chart.getContainer().getUrn()
+ : null;
+ }))
+ .dataFetcher("parentContainers", new ParentContainersResolver(entityClient))
+ .dataFetcher(
+ "statsSummary", new ChartStatsSummaryResolver(this.timeseriesAspectService))
.dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- )
- .type("DataJobInputOutput", typeWiring -> typeWiring
- .dataFetcher("inputDatasets", new LoadableTypeBatchResolver<>(datasetType,
- (env) -> ((DataJobInputOutput) env.getSource()).getInputDatasets().stream()
- .map(datasetType.getKeyProvider())
- .collect(Collectors.toList())))
- .dataFetcher("outputDatasets", new LoadableTypeBatchResolver<>(datasetType,
- (env) -> ((DataJobInputOutput) env.getSource()).getOutputDatasets().stream()
- .map(datasetType.getKeyProvider())
- .collect(Collectors.toList())))
- .dataFetcher("inputDatajobs", new LoadableTypeBatchResolver<>(dataJobType,
- (env) -> ((DataJobInputOutput) env.getSource()).getInputDatajobs().stream()
- .map(DataJob::getUrn)
- .collect(Collectors.toList())))
- );
- }
+ .dataFetcher("exists", new EntityExistsResolver(entityService)));
+ builder.type(
+ "ChartInfo",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "inputs",
+ new LoadableTypeBatchResolver<>(
+ datasetType,
+ (env) ->
+ ((ChartInfo) env.getSource())
+ .getInputs().stream()
+ .map(datasetType.getKeyProvider())
+ .collect(Collectors.toList()))));
+ }
+
+ /** Configures {@link graphql.schema.TypeResolver}s for any GQL 'union' or 'interface' types. */
+ private void configureTypeResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "Entity",
+ typeWiring ->
+ typeWiring.typeResolver(
+ new EntityInterfaceTypeResolver(
+ loadableTypes.stream()
+ .filter(graphType -> graphType instanceof EntityType)
+ .map(graphType -> (EntityType<?, ?>) graphType)
+ .collect(Collectors.toList()))))
+ .type(
+ "EntityWithRelationships",
+ typeWiring ->
+ typeWiring.typeResolver(
+ new EntityInterfaceTypeResolver(
+ loadableTypes.stream()
+ .filter(graphType -> graphType instanceof EntityType)
+ .map(graphType -> (EntityType<?, ?>) graphType)
+ .collect(Collectors.toList()))))
+ .type(
+ "BrowsableEntity",
+ typeWiring ->
+ typeWiring.typeResolver(
+ new EntityInterfaceTypeResolver(
+ browsableTypes.stream()
+ .map(graphType -> (EntityType<?, ?>) graphType)
+ .collect(Collectors.toList()))))
+ .type(
+ "OwnerType",
+ typeWiring ->
+ typeWiring.typeResolver(
+ new EntityInterfaceTypeResolver(
+ ownerTypes.stream()
+ .filter(graphType -> graphType instanceof EntityType)
+ .map(graphType -> (EntityType<?, ?>) graphType)
+ .collect(Collectors.toList()))))
+ .type(
+ "PlatformSchema",
+ typeWiring -> typeWiring.typeResolver(new PlatformSchemaUnionTypeResolver()))
+ .type(
+ "HyperParameterValueType",
+ typeWiring -> typeWiring.typeResolver(new HyperParameterValueTypeResolver()))
+ .type("Aspect", typeWiring -> typeWiring.typeResolver(new AspectInterfaceTypeResolver()))
+ .type(
+ "TimeSeriesAspect",
+ typeWiring -> typeWiring.typeResolver(new TimeSeriesAspectInterfaceTypeResolver()))
+ .type("ResultsType", typeWiring -> typeWiring.typeResolver(new ResultsTypeResolver()));
+ }
+
+ /** Configures custom type extensions leveraged within our GraphQL schema. */
+ private void configureTypeExtensions(final RuntimeWiring.Builder builder) {
+ builder.scalar(GraphQLLong);
+ }
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.DataFlow} type.
- */
- private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("DataFlow", typeWiring -> typeWiring
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.DataJob} type.
+ */
+ private void configureDataJobResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "DataJob",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataJobType))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher(
+ "dataFlow",
+ new LoadableTypeResolver<>(
+ dataFlowType,
+ (env) -> ((DataJob) env.getSource()).getDataFlow().getUrn()))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final DataJob dataJob = env.getSource();
+ return dataJob.getDataPlatformInstance() != null
+ ? dataJob.getDataPlatformInstance().getUrn()
+ : null;
+ }))
+ .dataFetcher("runs", new DataJobRunsResolver(entityClient))
+ .dataFetcher("privileges", new EntityPrivilegesResolver(entityClient))
+ .dataFetcher("exists", new EntityExistsResolver(entityService)))
+ .type(
+ "DataJobInputOutput",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "inputDatasets",
+ new LoadableTypeBatchResolver<>(
+ datasetType,
+ (env) ->
+ ((DataJobInputOutput) env.getSource())
+ .getInputDatasets().stream()
+ .map(datasetType.getKeyProvider())
+ .collect(Collectors.toList())))
+ .dataFetcher(
+ "outputDatasets",
+ new LoadableTypeBatchResolver<>(
+ datasetType,
+ (env) ->
+ ((DataJobInputOutput) env.getSource())
+ .getOutputDatasets().stream()
+ .map(datasetType.getKeyProvider())
+ .collect(Collectors.toList())))
+ .dataFetcher(
+ "inputDatajobs",
+ new LoadableTypeBatchResolver<>(
+ dataJobType,
+ (env) ->
+ ((DataJobInputOutput) env.getSource())
+ .getInputDatajobs().stream()
+ .map(DataJob::getUrn)
+ .collect(Collectors.toList()))));
+ }
+
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.DataFlow} type.
+ */
+ private void configureDataFlowResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "DataFlow",
+ typeWiring ->
+ typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
.dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.dataFlowType))
.dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((DataFlow) env.getSource()).getPlatform().getUrn()))
.dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
(env) -> {
- final DataFlow dataFlow = env.getSource();
- return dataFlow.getDataPlatformInstance() != null ? dataFlow.getDataPlatformInstance().getUrn() : null;
- })
- )
- );
- }
+ final DataFlow dataFlow = env.getSource();
+ return dataFlow.getDataPlatformInstance() != null
+ ? dataFlow.getDataPlatformInstance().getUrn()
+ : null;
+ })));
+ }
- /**
- * Configures resolvers responsible for resolving the {@link com.linkedin.datahub.graphql.generated.MLFeatureTable} type.
- */
- private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("MLFeatureTable", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("platform",
- new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ /**
+ * Configures resolvers responsible for resolving the {@link
+ * com.linkedin.datahub.graphql.generated.MLFeatureTable} type.
+ */
+ private void configureMLFeatureTableResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "MLFeatureTable",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher(
+ "browsePaths", new EntityBrowsePathsResolver(this.mlFeatureTableType))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((MLFeatureTable) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final MLFeatureTable entity = env.getSource();
+ return entity.getDataPlatformInstance() != null
+ ? entity.getDataPlatformInstance().getUrn()
+ : null;
+ })))
+ .type(
+ "MLFeatureTableProperties",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "mlFeatures",
+ new LoadableTypeBatchResolver<>(
+ mlFeatureType,
+ (env) ->
+ ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null
+ ? ((MLFeatureTableProperties) env.getSource())
+ .getMlFeatures().stream()
+ .map(MLFeature::getUrn)
+ .collect(Collectors.toList())
+ : ImmutableList.of()))
+ .dataFetcher(
+ "mlPrimaryKeys",
+ new LoadableTypeBatchResolver<>(
+ mlPrimaryKeyType,
+ (env) ->
+ ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys()
+ != null
+ ? ((MLFeatureTableProperties) env.getSource())
+ .getMlPrimaryKeys().stream()
+ .map(MLPrimaryKey::getUrn)
+ .collect(Collectors.toList())
+ : ImmutableList.of())))
+ .type(
+ "MLFeatureProperties",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "sources",
+ new LoadableTypeBatchResolver<>(
+ datasetType,
(env) -> {
- final MLFeatureTable entity = env.getSource();
- return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null;
- })
- )
- )
- .type("MLFeatureTableProperties", typeWiring -> typeWiring
- .dataFetcher("mlFeatures",
- new LoadableTypeBatchResolver<>(mlFeatureType,
- (env) ->
- ((MLFeatureTableProperties) env.getSource()).getMlFeatures() != null
- ? ((MLFeatureTableProperties) env.getSource()).getMlFeatures().stream()
- .map(MLFeature::getUrn)
- .collect(Collectors.toList()) : ImmutableList.of()))
- .dataFetcher("mlPrimaryKeys",
- new LoadableTypeBatchResolver<>(mlPrimaryKeyType,
- (env) ->
- ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys() != null
- ? ((MLFeatureTableProperties) env.getSource()).getMlPrimaryKeys().stream()
- .map(MLPrimaryKey::getUrn)
- .collect(Collectors.toList()) : ImmutableList.of()))
- )
- .type("MLFeatureProperties", typeWiring -> typeWiring
- .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType,
- (env) -> {
- if (((MLFeatureProperties) env.getSource()).getSources() == null) {
+ if (((MLFeatureProperties) env.getSource()).getSources() == null) {
return Collections.emptyList();
- }
- return ((MLFeatureProperties) env.getSource()).getSources().stream()
- .map(datasetType.getKeyProvider())
- .collect(Collectors.toList());
- })
- )
- )
- .type("MLPrimaryKeyProperties", typeWiring -> typeWiring
- .dataFetcher("sources", new LoadableTypeBatchResolver<>(datasetType,
- (env) -> {
- if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) {
+ }
+ return ((MLFeatureProperties) env.getSource())
+ .getSources().stream()
+ .map(datasetType.getKeyProvider())
+ .collect(Collectors.toList());
+ })))
+ .type(
+ "MLPrimaryKeyProperties",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "sources",
+ new LoadableTypeBatchResolver<>(
+ datasetType,
+ (env) -> {
+ if (((MLPrimaryKeyProperties) env.getSource()).getSources() == null) {
return Collections.emptyList();
- }
- return ((MLPrimaryKeyProperties) env.getSource()).getSources().stream()
- .map(datasetType.getKeyProvider())
- .collect(Collectors.toList());
- })
- )
- )
- .type("MLModel", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((MLModel) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ }
+ return ((MLPrimaryKeyProperties) env.getSource())
+ .getSources().stream()
+ .map(datasetType.getKeyProvider())
+ .collect(Collectors.toList());
+ })))
+ .type(
+ "MLModel",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelType))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((MLModel) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final MLModel mlModel = env.getSource();
+ return mlModel.getDataPlatformInstance() != null
+ ? mlModel.getDataPlatformInstance().getUrn()
+ : null;
+ })))
+ .type(
+ "MLModelProperties",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "groups",
+ new LoadableTypeBatchResolver<>(
+ mlModelGroupType,
(env) -> {
- final MLModel mlModel = env.getSource();
- return mlModel.getDataPlatformInstance() != null ? mlModel.getDataPlatformInstance().getUrn() : null;
- })
- )
- )
- .type("MLModelProperties", typeWiring -> typeWiring
- .dataFetcher("groups", new LoadableTypeBatchResolver<>(mlModelGroupType,
- (env) -> {
- MLModelProperties properties = env.getSource();
- if (properties.getGroups() != null) {
+ MLModelProperties properties = env.getSource();
+ if (properties.getGroups() != null) {
return properties.getGroups().stream()
.map(MLModelGroup::getUrn)
.collect(Collectors.toList());
- }
- return Collections.emptyList();
- })
- )
- )
- .type("MLModelGroup", typeWiring -> typeWiring
+ }
+ return Collections.emptyList();
+ })))
+ .type(
+ "MLModelGroup",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher(
+ "browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final MLModelGroup entity = env.getSource();
+ return entity.getDataPlatformInstance() != null
+ ? entity.getDataPlatformInstance().getUrn()
+ : null;
+ })))
+ .type(
+ "MLFeature",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final MLFeature entity = env.getSource();
+ return entity.getDataPlatformInstance() != null
+ ? entity.getDataPlatformInstance().getUrn()
+ : null;
+ })))
+ .type(
+ "MLPrimaryKey",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient))
+ .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
+ .dataFetcher("exists", new EntityExistsResolver(entityService))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
+ (env) -> {
+ final MLPrimaryKey entity = env.getSource();
+ return entity.getDataPlatformInstance() != null
+ ? entity.getDataPlatformInstance().getUrn()
+ : null;
+ })));
+ }
+
+ private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "GlossaryTerm",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)))
+ .type(
+ "GlossaryNode",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)));
+ }
+
+ private void configureDomainResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Domain",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient))
+ .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient))
+ .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)));
+ builder.type(
+ "DomainAssociation",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "domain",
+ new LoadableTypeResolver<>(
+ domainType,
+ (env) ->
+ ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource())
+ .getDomain()
+ .getUrn())));
+ }
+
+ private void configureDataProductResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "DataProduct",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient))
+ .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)));
+ }
+
+ private void configureAssertionResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "Assertion",
+ typeWiring ->
+ typeWiring
.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("browsePaths", new EntityBrowsePathsResolver(this.mlModelGroupType))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((MLModelGroup) env.getSource()).getPlatform().getUrn())
- )
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> ((Assertion) env.getSource()).getPlatform().getUrn()))
+ .dataFetcher(
+ "dataPlatformInstance",
+ new LoadableTypeResolver<>(
+ dataPlatformInstanceType,
(env) -> {
- final MLModelGroup entity = env.getSource();
- return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null;
- })
- )
- )
- .type("MLFeature", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ final Assertion assertion = env.getSource();
+ return assertion.getDataPlatformInstance() != null
+ ? assertion.getDataPlatformInstance().getUrn()
+ : null;
+ }))
+ .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient)));
+ }
+
+ private void configurePolicyResolvers(final RuntimeWiring.Builder builder) {
+ // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type.
+ builder.type(
+ "ActorFilter",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "resolvedUsers",
+ new LoadableTypeBatchResolver<>(
+ corpUserType,
(env) -> {
- final MLFeature entity = env.getSource();
- return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null;
- })
- )
- )
- .type("MLPrimaryKey", typeWiring -> typeWiring
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("exists", new EntityExistsResolver(entityService))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
+ final ActorFilter filter = env.getSource();
+ return filter.getUsers();
+ }))
+ .dataFetcher(
+ "resolvedGroups",
+ new LoadableTypeBatchResolver<>(
+ corpGroupType,
(env) -> {
- final MLPrimaryKey entity = env.getSource();
- return entity.getDataPlatformInstance() != null ? entity.getDataPlatformInstance().getUrn() : null;
- })
- )
- );
- }
-
- private void configureGlossaryRelationshipResolvers(final RuntimeWiring.Builder builder) {
- builder.type("GlossaryTerm", typeWiring -> typeWiring.dataFetcher("relationships",
- new EntityRelationshipsResultResolver(graphClient)))
- .type("GlossaryNode", typeWiring -> typeWiring.dataFetcher("relationships",
- new EntityRelationshipsResultResolver(graphClient)));
- }
-
- private void configureDomainResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Domain", typeWiring -> typeWiring
- .dataFetcher("entities", new DomainEntitiesResolver(this.entityClient))
- .dataFetcher("parentDomains", new ParentDomainsResolver(this.entityClient))
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- );
- builder.type("DomainAssociation", typeWiring -> typeWiring
- .dataFetcher("domain",
- new LoadableTypeResolver<>(domainType,
- (env) -> ((com.linkedin.datahub.graphql.generated.DomainAssociation) env.getSource()).getDomain().getUrn()))
- );
- }
-
- private void configureDataProductResolvers(final RuntimeWiring.Builder builder) {
- builder.type("DataProduct", typeWiring -> typeWiring
- .dataFetcher("entities", new ListDataProductAssetsResolver(this.entityClient))
- .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
- );
- }
-
- private void configureAssertionResolvers(final RuntimeWiring.Builder builder) {
- builder.type("Assertion", typeWiring -> typeWiring.dataFetcher("relationships",
- new EntityRelationshipsResultResolver(graphClient))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> ((Assertion) env.getSource()).getPlatform().getUrn()))
- .dataFetcher("dataPlatformInstance",
- new LoadableTypeResolver<>(dataPlatformInstanceType,
- (env) -> {
- final Assertion assertion = env.getSource();
- return assertion.getDataPlatformInstance() != null ? assertion.getDataPlatformInstance().getUrn() : null;
- })
- )
- .dataFetcher("runEvents", new AssertionRunEventResolver(entityClient)));
- }
-
- private void configurePolicyResolvers(final RuntimeWiring.Builder builder) {
- // Register resolvers for "resolvedUsers" and "resolvedGroups" field of the Policy type.
- builder.type("ActorFilter", typeWiring -> typeWiring.dataFetcher("resolvedUsers",
- new LoadableTypeBatchResolver<>(corpUserType, (env) -> {
- final ActorFilter filter = env.getSource();
- return filter.getUsers();
- })).dataFetcher("resolvedGroups", new LoadableTypeBatchResolver<>(corpGroupType, (env) -> {
- final ActorFilter filter = env.getSource();
- return filter.getGroups();
- })).dataFetcher("resolvedRoles", new LoadableTypeBatchResolver<>(dataHubRoleType, (env) -> {
- final ActorFilter filter = env.getSource();
- return filter.getRoles();
- })).dataFetcher("resolvedOwnershipTypes", new LoadableTypeBatchResolver<>(ownershipType, (env) -> {
- final ActorFilter filter = env.getSource();
- return filter.getResourceOwnersTypes();
- })));
- }
-
- private void configureRoleResolvers(final RuntimeWiring.Builder builder) {
- builder.type("DataHubRole",
- typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)));
- }
-
- private void configureViewResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("DataHubView",
- typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)))
- .type("ListViewsResult", typeWiring -> typeWiring
- .dataFetcher("views", new LoadableTypeBatchResolver<>(
- dataHubViewType,
- (env) -> ((ListViewsResult) env.getSource()).getViews().stream()
- .map(DataHubView::getUrn)
- .collect(Collectors.toList())))
- )
- .type("CorpUserViewsSettings", typeWiring -> typeWiring
- .dataFetcher("defaultView", new LoadableTypeResolver<>(
+ final ActorFilter filter = env.getSource();
+ return filter.getGroups();
+ }))
+ .dataFetcher(
+ "resolvedRoles",
+ new LoadableTypeBatchResolver<>(
+ dataHubRoleType,
+ (env) -> {
+ final ActorFilter filter = env.getSource();
+ return filter.getRoles();
+ }))
+ .dataFetcher(
+ "resolvedOwnershipTypes",
+ new LoadableTypeBatchResolver<>(
+ ownershipType,
+ (env) -> {
+ final ActorFilter filter = env.getSource();
+ return filter.getResourceOwnersTypes();
+ })));
+ }
+
+ private void configureRoleResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "DataHubRole",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)));
+ }
+
+ private void configureViewResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "DataHubView",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)))
+ .type(
+ "ListViewsResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "views",
+ new LoadableTypeBatchResolver<>(
+ dataHubViewType,
+ (env) ->
+ ((ListViewsResult) env.getSource())
+ .getViews().stream()
+ .map(DataHubView::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "CorpUserViewsSettings",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "defaultView",
+ new LoadableTypeResolver<>(
dataHubViewType,
(env) -> {
- final CorpUserViewsSettings settings = env.getSource();
- if (settings.getDefaultView() != null) {
- return settings.getDefaultView().getUrn();
- }
- return null;
- }
- )
- ));
- }
-
- private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) {
- builder
- .type("QueryEntity",
- typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)))
- .type("ListQueriesResult", typeWiring -> typeWiring
- .dataFetcher("queries", new LoadableTypeBatchResolver<>(
- queryType,
- (env) -> ((ListQueriesResult) env.getSource()).getQueries().stream()
- .map(QueryEntity::getUrn)
- .collect(Collectors.toList())))
- )
- .type("QuerySubject", typeWiring -> typeWiring
- .dataFetcher("dataset", new LoadableTypeResolver<>(
- datasetType,
- (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn()))
- );
-
- }
-
- private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) {
- builder
- .type("OwnershipTypeEntity",
- typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient)))
- .type("ListOwnershipTypesResult", typeWiring -> typeWiring
- .dataFetcher("ownershipTypes", new LoadableTypeBatchResolver<>(ownershipType,
- (env) -> ((ListOwnershipTypesResult) env.getSource()).getOwnershipTypes().stream()
- .map(OwnershipTypeEntity::getUrn)
- .collect(Collectors.toList())))
- );
- }
-
- private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) {
- builder.type("DataProcessInstance",
- typeWiring -> typeWiring.dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
+ final CorpUserViewsSettings settings = env.getSource();
+ if (settings.getDefaultView() != null) {
+ return settings.getDefaultView().getUrn();
+ }
+ return null;
+ })));
+ }
+
+ private void configureQueryEntityResolvers(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "QueryEntity",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)))
+ .type(
+ "ListQueriesResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "queries",
+ new LoadableTypeBatchResolver<>(
+ queryType,
+ (env) ->
+ ((ListQueriesResult) env.getSource())
+ .getQueries().stream()
+ .map(QueryEntity::getUrn)
+ .collect(Collectors.toList()))))
+ .type(
+ "QuerySubject",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "dataset",
+ new LoadableTypeResolver<>(
+ datasetType,
+ (env) -> ((QuerySubject) env.getSource()).getDataset().getUrn())));
+ }
+
+ private void configureOwnershipTypeResolver(final RuntimeWiring.Builder builder) {
+ builder
+ .type(
+ "OwnershipTypeEntity",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "relationships", new EntityRelationshipsResultResolver(graphClient)))
+ .type(
+ "ListOwnershipTypesResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "ownershipTypes",
+ new LoadableTypeBatchResolver<>(
+ ownershipType,
+ (env) ->
+ ((ListOwnershipTypesResult) env.getSource())
+ .getOwnershipTypes().stream()
+ .map(OwnershipTypeEntity::getUrn)
+ .collect(Collectors.toList()))));
+ }
+
+ private void configureDataProcessInstanceResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "DataProcessInstance",
+ typeWiring ->
+ typeWiring
+ .dataFetcher("relationships", new EntityRelationshipsResultResolver(graphClient))
.dataFetcher("lineage", new EntityLineageResultResolver(siblingGraphService))
- .dataFetcher("state", new TimeSeriesAspectResolver(this.entityClient, "dataProcessInstance",
- DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME, DataProcessInstanceRunEventMapper::map)));
- }
-
- private void configureTestResultResolvers(final RuntimeWiring.Builder builder) {
- builder.type("TestResult", typeWiring -> typeWiring
- .dataFetcher("test", new LoadableTypeResolver<>(testType,
- (env) -> {
- final TestResult testResult = env.getSource();
- return testResult.getTest() != null ? testResult.getTest().getUrn() : null;
- }))
- );
- }
-
- private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader(final LoadableType<T, K> graphType, final QueryContext queryContext) {
- BatchLoaderContextProvider contextProvider = () -> queryContext;
- DataLoaderOptions loaderOptions = DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider);
- return DataLoader.newDataLoader((keys, context) -> CompletableFuture.supplyAsync(() -> {
- try {
- log.debug(String.format("Batch loading entities of type: %s, keys: %s", graphType.name(), keys));
- return graphType.batchLoad(keys, context.getContext());
- } catch (Exception e) {
- log.error(String.format("Failed to load Entities of type: %s, keys: %s", graphType.name(), keys) + " " + e.getMessage());
- throw new RuntimeException(String.format("Failed to retrieve entities of type %s", graphType.name()), e);
- }
- }), loaderOptions);
- }
-
- private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) {
- builder.type("IngestionSource", typeWiring -> typeWiring
- .dataFetcher("executions", new IngestionSourceExecutionRequestsResolver(entityClient))
- .dataFetcher("platform", new LoadableTypeResolver<>(dataPlatformType,
- (env) -> {
- final IngestionSource ingestionSource = env.getSource();
- return ingestionSource.getPlatform() != null ? ingestionSource.getPlatform().getUrn() : null;
- })
- ));
- }
+ .dataFetcher(
+ "state",
+ new TimeSeriesAspectResolver(
+ this.entityClient,
+ "dataProcessInstance",
+ DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME,
+ DataProcessInstanceRunEventMapper::map)));
+ }
+
+ private void configureTestResultResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "TestResult",
+ typeWiring ->
+ typeWiring.dataFetcher(
+ "test",
+ new LoadableTypeResolver<>(
+ testType,
+ (env) -> {
+ final TestResult testResult = env.getSource();
+ return testResult.getTest() != null ? testResult.getTest().getUrn() : null;
+ })));
+ }
+
+ private <T, K> DataLoader<K, DataFetcherResult<T>> createDataLoader(
+ final LoadableType<T, K> graphType, final QueryContext queryContext) {
+ BatchLoaderContextProvider contextProvider = () -> queryContext;
+ DataLoaderOptions loaderOptions =
+ DataLoaderOptions.newOptions().setBatchLoaderContextProvider(contextProvider);
+ return DataLoader.newDataLoader(
+ (keys, context) ->
+ CompletableFuture.supplyAsync(
+ () -> {
+ try {
+ log.debug(
+ String.format(
+ "Batch loading entities of type: %s, keys: %s",
+ graphType.name(), keys));
+ return graphType.batchLoad(keys, context.getContext());
+ } catch (Exception e) {
+ log.error(
+ String.format(
+ "Failed to load Entities of type: %s, keys: %s",
+ graphType.name(), keys)
+ + " "
+ + e.getMessage());
+ throw new RuntimeException(
+ String.format("Failed to retrieve entities of type %s", graphType.name()),
+ e);
+ }
+ }),
+ loaderOptions);
+ }
+
+ private void configureIngestionSourceResolvers(final RuntimeWiring.Builder builder) {
+ builder.type(
+ "IngestionSource",
+ typeWiring ->
+ typeWiring
+ .dataFetcher(
+ "executions", new IngestionSourceExecutionRequestsResolver(entityClient))
+ .dataFetcher(
+ "platform",
+ new LoadableTypeResolver<>(
+ dataPlatformType,
+ (env) -> {
+ final IngestionSource ingestionSource = env.getSource();
+ return ingestionSource.getPlatform() != null
+ ? ingestionSource.getPlatform().getUrn()
+ : null;
+ })));
+ }
}
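
All of the configure*Resolvers methods above follow the same graphql-java pattern: look up a GraphQL type by name on the RuntimeWiring.Builder and attach one DataFetcher per field. The sketch below shows that pattern in isolation; the type name "MyEntity", the "urn" field, and the placeholder fetcher are hypothetical and not DataHub types.

import graphql.schema.DataFetcher;
import graphql.schema.idl.RuntimeWiring;

public class ResolverWiringSketch {
  // Attaches a trivial fetcher to a hypothetical "MyEntity.urn" field, mirroring the
  // builder.type(...).dataFetcher(...) calls used by the configure*Resolvers methods above.
  public static RuntimeWiring.Builder wire(final RuntimeWiring.Builder builder) {
    DataFetcher<String> urnFetcher = env -> "urn:li:myEntity:example"; // placeholder value
    return builder.type("MyEntity", typeWiring -> typeWiring.dataFetcher("urn", urnFetcher));
  }
}
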
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java
index 157fb10ce70785..4829194a8ce4d9 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngineArgs.java
@@ -38,41 +38,41 @@
@Data
public class GmsGraphQLEngineArgs {
- EntityClient entityClient;
- SystemEntityClient systemEntityClient;
- GraphClient graphClient;
- UsageClient usageClient;
- AnalyticsService analyticsService;
- EntityService entityService;
- RecommendationsService recommendationsService;
- StatefulTokenService statefulTokenService;
- TimeseriesAspectService timeseriesAspectService;
- EntityRegistry entityRegistry;
- SecretService secretService;
- NativeUserService nativeUserService;
- IngestionConfiguration ingestionConfiguration;
- AuthenticationConfiguration authenticationConfiguration;
- AuthorizationConfiguration authorizationConfiguration;
- GitVersion gitVersion;
- TimelineService timelineService;
- boolean supportsImpactAnalysis;
- VisualConfiguration visualConfiguration;
- TelemetryConfiguration telemetryConfiguration;
- TestsConfiguration testsConfiguration;
- DataHubConfiguration datahubConfiguration;
- ViewsConfiguration viewsConfiguration;
- SiblingGraphService siblingGraphService;
- GroupService groupService;
- RoleService roleService;
- InviteTokenService inviteTokenService;
- PostService postService;
- ViewService viewService;
- OwnershipTypeService ownershipTypeService;
- SettingsService settingsService;
- LineageService lineageService;
- QueryService queryService;
- FeatureFlags featureFlags;
- DataProductService dataProductService;
+ EntityClient entityClient;
+ SystemEntityClient systemEntityClient;
+ GraphClient graphClient;
+ UsageClient usageClient;
+ AnalyticsService analyticsService;
+ EntityService entityService;
+ RecommendationsService recommendationsService;
+ StatefulTokenService statefulTokenService;
+ TimeseriesAspectService timeseriesAspectService;
+ EntityRegistry entityRegistry;
+ SecretService secretService;
+ NativeUserService nativeUserService;
+ IngestionConfiguration ingestionConfiguration;
+ AuthenticationConfiguration authenticationConfiguration;
+ AuthorizationConfiguration authorizationConfiguration;
+ GitVersion gitVersion;
+ TimelineService timelineService;
+ boolean supportsImpactAnalysis;
+ VisualConfiguration visualConfiguration;
+ TelemetryConfiguration telemetryConfiguration;
+ TestsConfiguration testsConfiguration;
+ DataHubConfiguration datahubConfiguration;
+ ViewsConfiguration viewsConfiguration;
+ SiblingGraphService siblingGraphService;
+ GroupService groupService;
+ RoleService roleService;
+ InviteTokenService inviteTokenService;
+ PostService postService;
+ ViewService viewService;
+ OwnershipTypeService ownershipTypeService;
+ SettingsService settingsService;
+ LineageService lineageService;
+ QueryService queryService;
+ FeatureFlags featureFlags;
+ DataProductService dataProductService;
- //any fork specific args should go below this line
+ // any fork specific args should go below this line
}
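
Because GmsGraphQLEngineArgs is a Lombok @Data holder, callers populate it through the generated setters before handing it to the engine or to a plugin's init method. A minimal, hedged sketch (only a few fields shown; the client instances are assumed to be constructed by the caller, and the setter names follow Lombok's convention):

// Sketch only: entityClient and graphClient are supplied by the caller's own wiring.
static GmsGraphQLEngineArgs buildArgs(EntityClient entityClient, GraphClient graphClient) {
  GmsGraphQLEngineArgs args = new GmsGraphQLEngineArgs();
  args.setEntityClient(entityClient);        // Lombok @Data generates these setters
  args.setGraphClient(graphClient);
  args.setSupportsImpactAnalysis(false);
  return args;
}
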
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
index e7ef0c402a1de5..472d9465aeee12 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLPlugin.java
@@ -5,41 +5,42 @@
import java.util.Collection;
import java.util.List;
-
/**
- * An interface that allows the Core GMS GraphQL Engine to be extended without requiring
- * code changes in the GmsGraphQLEngine class if new entities, relationships or resolvers
- * need to be introduced. This is useful if you are maintaining a fork of DataHub and
- * don't want to deal with merge conflicts.
+ * An interface that allows the Core GMS GraphQL Engine to be extended without requiring code
+ * changes in the GmsGraphQLEngine class if new entities, relationships or resolvers need to be
+ * introduced. This is useful if you are maintaining a fork of DataHub and don't want to deal with
+ * merge conflicts.
*/
public interface GmsGraphQLPlugin {
/**
* Initialization method that allows the plugin to instantiate
+ *
* @param args
*/
void init(GmsGraphQLEngineArgs args);
/**
- * Return a list of schema files that contain graphql definitions
- * that are served by this plugin
+ * Return a list of schema files that contain graphql definitions that are served by this plugin
+ *
* @return
*/
List<String> getSchemaFiles();
/**
* Return a list of LoadableTypes that this plugin serves
+ *
* @return
*/
Collection<? extends LoadableType<?, ?>> getLoadableTypes();
/**
- * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific resolvers.
+ * Optional callback that a plugin can implement to configure any Query, Mutation or Type specific
+ * resolvers.
+ *
* @param wiringBuilder : the builder being used to configure the runtime wiring
* @param baseEngine : a reference to the core engine and its graphql types
*/
- default void configureExtraResolvers(final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {
-
- }
-
+ default void configureExtraResolvers(
+ final RuntimeWiring.Builder wiringBuilder, final GmsGraphQLEngine baseEngine) {}
}
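
The interface above is the fork extension point described in its Javadoc. A minimal plugin could look like the sketch below; the generic types on getSchemaFiles and getLoadableTypes (List<String> and Collection<? extends LoadableType<?, ?>>) are assumptions reconstructed from context, and the class name, package of LoadableType, and schema resource are hypothetical.

import java.util.Collection;
import java.util.Collections;
import java.util.List;

public class MyForkGraphQLPlugin implements GmsGraphQLPlugin {

  @Override
  public void init(GmsGraphQLEngineArgs args) {
    // Capture whatever clients or services the fork's resolvers need from args.
  }

  @Override
  public List<String> getSchemaFiles() {
    return List.of("fork.graphql"); // hypothetical schema resource on the classpath
  }

  @Override
  public Collection<? extends LoadableType<?, ?>> getLoadableTypes() {
    return Collections.emptyList(); // no extra entity types in this sketch
  }

  // configureExtraResolvers keeps its default no-op implementation.
}
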
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java
index 74c4c541b972b1..f95727a1e8fd1d 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GraphQLEngine.java
@@ -1,5 +1,7 @@
package com.linkedin.datahub.graphql;
+import static graphql.schema.idl.RuntimeWiring.*;
+
import com.linkedin.datahub.graphql.exception.DataHubDataFetcherExceptionHandler;
import graphql.ExecutionInput;
import graphql.ExecutionResult;
@@ -22,152 +24,157 @@
import org.dataloader.DataLoader;
import org.dataloader.DataLoaderRegistry;
-import static graphql.schema.idl.RuntimeWiring.*;
-
/**
- * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and executing
- * GQL queries.
-
- * This class provides a {@link Builder} builder for constructing {@link GraphQL} instances provided one or more
- * schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}.
+ * Simple wrapper around a {@link GraphQL} instance providing APIs for building an engine and
+ * executing GQL queries.
+ *
+ * <p>This class provides a {@link Builder} builder for constructing {@link GraphQL} instances
+ * provided one or more schemas, {@link DataLoader}s, & a configured {@link RuntimeWiring}.
*
- * <p>In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set of variables.
+ * <p>In addition, it provides a simplified 'execute' API that accepts a 1) query string and 2) set
+ * of variables.
*/
public class GraphQLEngine {
- private final GraphQL _graphQL;
- private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _dataLoaderSuppliers;
+ private final GraphQL _graphQL;
+ private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _dataLoaderSuppliers;
- private GraphQLEngine(@Nonnull final List<String> schemas,
- @Nonnull final RuntimeWiring runtimeWiring,
- @Nonnull final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
+ private GraphQLEngine(
+ @Nonnull final List<String> schemas,
+ @Nonnull final RuntimeWiring runtimeWiring,
+ @Nonnull final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
- _dataLoaderSuppliers = dataLoaderSuppliers;
+ _dataLoaderSuppliers = dataLoaderSuppliers;
- /*
- * Parse schema
- */
- SchemaParser schemaParser = new SchemaParser();
- TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry();
- schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema)));
+ /*
+ * Parse schema
+ */
+ SchemaParser schemaParser = new SchemaParser();
+ TypeDefinitionRegistry typeDefinitionRegistry = new TypeDefinitionRegistry();
+ schemas.forEach(schema -> typeDefinitionRegistry.merge(schemaParser.parse(schema)));
- /*
- * Configure resolvers (data fetchers)
- */
- SchemaGenerator schemaGenerator = new SchemaGenerator();
- GraphQLSchema graphQLSchema = schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring);
+ /*
+ * Configure resolvers (data fetchers)
+ */
+ SchemaGenerator schemaGenerator = new SchemaGenerator();
+ GraphQLSchema graphQLSchema =
+ schemaGenerator.makeExecutableSchema(typeDefinitionRegistry, runtimeWiring);
- /*
- * Instantiate engine
- */
- _graphQL = new GraphQL.Builder(graphQLSchema)
+ /*
+ * Instantiate engine
+ */
+ _graphQL =
+ new GraphQL.Builder(graphQLSchema)
.defaultDataFetcherExceptionHandler(new DataHubDataFetcherExceptionHandler())
.instrumentation(new TracingInstrumentation())
.build();
- }
+ }
+
+ public ExecutionResult execute(
+ @Nonnull final String query,
+ @Nullable final Map<String, Object> variables,
+ @Nonnull final QueryContext context) {
+ /*
+ * Init DataLoaderRegistry - should be created for each request.
+ */
+ DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context);
- public ExecutionResult execute(@Nonnull final String query,
- @Nullable final Map<String, Object> variables,
- @Nonnull final QueryContext context) {
- /*
- * Init DataLoaderRegistry - should be created for each request.
- */
- DataLoaderRegistry register = createDataLoaderRegistry(_dataLoaderSuppliers, context);
-
- /*
- * Construct execution input
- */
- ExecutionInput executionInput = ExecutionInput.newExecutionInput()
+ /*
+ * Construct execution input
+ */
+ ExecutionInput executionInput =
+ ExecutionInput.newExecutionInput()
.query(query)
.variables(variables)
.dataLoaderRegistry(register)
.context(context)
.build();
- /*
- * Execute GraphQL Query
- */
- return _graphQL.execute(executionInput);
- }
+ /*
+ * Execute GraphQL Query
+ */
+ return _graphQL.execute(executionInput);
+ }
+
+ public GraphQL getGraphQL() {
+ return _graphQL;
+ }
+
+ public static Builder builder() {
+ return new Builder();
+ }
+
+ /** Used to construct a {@link GraphQLEngine}. */
+ public static class Builder {
- public GraphQL getGraphQL() {
- return _graphQL;
+ private final List<String> _schemas = new ArrayList<>();
+ private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _loaderSuppliers =
+ new HashMap<>();
+ private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring();
+
+ /**
+ * Used to add a schema file containing the GQL types resolved by the engine.
+ *
+ * If multiple files are provided, their schemas will be merged together.
+ */
+ public Builder addSchema(final String schema) {
+ _schemas.add(schema);
+ return this;
}
- public static Builder builder() {
- return new Builder();
+ /**
+ * Used to register a {@link DataLoader} to be used within the configured resolvers.
+ *
+ * <p>The {@link Supplier} provided is expected to return a new instance of {@link DataLoader}
+ * when invoked.
+ *
+ * <p>If multiple loaders are registered with the name, the latter will override the former.
+ */
+ public Builder addDataLoader(
+ final String name, final Function<QueryContext, DataLoader<?, ?>> dataLoaderSupplier) {
+ _loaderSuppliers.put(name, dataLoaderSupplier);
+ return this;
}
/**
- * Used to construct a {@link GraphQLEngine}.
+ * Used to register multiple {@link DataLoader}s for use within the configured resolvers.
+ *
+ * The included {@link Supplier} provided is expected to return a new instance of {@link
+ * DataLoader} when invoked.
+ *
+ * <p>If multiple loaders are registered with the name, the latter will override the former.
*/
- public static class Builder {
-
- private final List<String> _schemas = new ArrayList<>();
- private final Map<String, Function<QueryContext, DataLoader<?, ?>>> _loaderSuppliers = new HashMap<>();
- private final RuntimeWiring.Builder _runtimeWiringBuilder = newRuntimeWiring();
-
- /**
- * Used to add a schema file containing the GQL types resolved by the engine.
- *
- * If multiple files are provided, their schemas will be merged together.
- */
- public Builder addSchema(final String schema) {
- _schemas.add(schema);
- return this;
- }
-
- /**
- * Used to register a {@link DataLoader} to be used within the configured resolvers.
- *
- * The {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked.
- *
- * If multiple loaders are registered with the name, the latter will override the former.
- */
- public Builder addDataLoader(final String name, final Function<QueryContext, DataLoader<?, ?>> dataLoaderSupplier) {
- _loaderSuppliers.put(name, dataLoaderSupplier);
- return this;
- }
-
- /**
- * Used to register multiple {@link DataLoader}s for use within the configured resolvers.
- *
- * The included {@link Supplier} provided is expected to return a new instance of {@link DataLoader} when invoked.
- *
- * If multiple loaders are registered with the name, the latter will override the former.
- */
- public Builder addDataLoaders(Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
- _loaderSuppliers.putAll(dataLoaderSuppliers);
- return this;
- }
-
- /**
- * Used to configure the runtime wiring (data fetchers & type resolvers)
- * used in resolving the Graph QL schema.
- *
- * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register any required
- * data + type resolvers.
- */
- public Builder configureRuntimeWiring(final Consumer<RuntimeWiring.Builder> builderFunc) {
- builderFunc.accept(_runtimeWiringBuilder);
- return this;
- }
-
- /**
- * Builds a {@link GraphQLEngine}.
- */
- public GraphQLEngine build() {
- return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers);
- }
+ public Builder addDataLoaders(
+ Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers) {
+ _loaderSuppliers.putAll(dataLoaderSuppliers);
+ return this;
}
- private DataLoaderRegistry createDataLoaderRegistry(final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers,
- final QueryContext context) {
- final DataLoaderRegistry registry = new DataLoaderRegistry();
- for (String key : dataLoaderSuppliers.keySet()) {
- registry.register(key, dataLoaderSuppliers.get(key).apply(context));
- }
- return registry;
+ /**
+ * Used to configure the runtime wiring (data fetchers & type resolvers) used in resolving the
+ * Graph QL schema.
+ *
+ * The {@link Consumer} provided accepts a {@link RuntimeWiring.Builder} and should register
+ * any required data + type resolvers.
+ */
+ public Builder configureRuntimeWiring(final Consumer<RuntimeWiring.Builder> builderFunc) {
+ builderFunc.accept(_runtimeWiringBuilder);
+ return this;
}
+ /** Builds a {@link GraphQLEngine}. */
+ public GraphQLEngine build() {
+ return new GraphQLEngine(_schemas, _runtimeWiringBuilder.build(), _loaderSuppliers);
+ }
+ }
+
+ private DataLoaderRegistry createDataLoaderRegistry(
+ final Map<String, Function<QueryContext, DataLoader<?, ?>>> dataLoaderSuppliers,
+ final QueryContext context) {
+ final DataLoaderRegistry registry = new DataLoaderRegistry();
+ for (String key : dataLoaderSuppliers.keySet()) {
+ registry.register(key, dataLoaderSuppliers.get(key).apply(context));
+ }
+ return registry;
+ }
}
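
Putting the Builder above together, a caller constructs the engine once and calls execute per request. The usage sketch below is illustrative only: the inline schema, loader name, dummy batch loader, and the QueryContext argument are placeholders, and the reconstructed generic signatures from the hunk above are assumed.

import graphql.ExecutionResult;
import java.util.ArrayList;
import java.util.Collections;
import java.util.concurrent.CompletableFuture;
import org.dataloader.DataLoader;

public class GraphQLEngineUsageSketch {
  public static ExecutionResult run(QueryContext queryContext) {
    GraphQLEngine engine =
        GraphQLEngine.builder()
            .addSchema("type Query { ping: String }") // inline schema for illustration
            .addDataLoader(
                "dummyLoader",
                ctx ->
                    DataLoader.newDataLoader(
                        keys -> CompletableFuture.completedFuture(new ArrayList<>(keys))))
            .configureRuntimeWiring(
                wiring -> wiring.type("Query", t -> t.dataFetcher("ping", env -> "pong")))
            .build();
    // A DataLoaderRegistry is created per request inside execute().
    return engine.execute("{ ping }", Collections.emptyMap(), queryContext);
  }
}
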
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java
index 4803ef08fdddcf..9f110e713ed574 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/QueryContext.java
@@ -4,38 +4,25 @@
import com.datahub.authentication.Authentication;
import com.datahub.plugins.auth.authorization.Authorizer;
-
-/**
- * Provided as input to GraphQL resolvers; used to carry information about GQL request context.
- */
+/** Provided as input to GraphQL resolvers; used to carry information about GQL request context. */
public interface QueryContext {
- /**
- * Returns true if the current actor is authenticated, false otherwise.
- */
- boolean isAuthenticated();
+ /** Returns true if the current actor is authenticated, false otherwise. */
+ boolean isAuthenticated();
- /**
- * Returns the {@link Authentication} associated with the current query context.
- */
- Authentication getAuthentication();
+ /** Returns the {@link Authentication} associated with the current query context. */
+ Authentication getAuthentication();
- /**
- * Returns the current authenticated actor, null if there is none.
- */
- default Actor getActor() {
- return getAuthentication().getActor();
- }
+ /** Returns the current authenticated actor, null if there is none. */
+ default Actor getActor() {
+ return getAuthentication().getActor();
+ }
- /**
- * Returns the current authenticated actor, null if there is none.
- */
- default String getActorUrn() {
- return getActor().toUrnStr();
- }
+ /** Returns the URN of the current authenticated actor, null if there is none. */
+ default String getActorUrn() {
+ return getActor().toUrnStr();
+ }
- /**
- * Returns the authorizer used to authorize specific actions.
- */
- Authorizer getAuthorizer();
+ /** Returns the authorizer used to authorize specific actions. */
+ Authorizer getAuthorizer();
}
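
As a concrete illustration of the contract above, a minimal implementation can simply wrap an Authentication and an Authorizer; this is a sketch with a hypothetical class name, not one of DataHub's own QueryContext implementations. The default getActor/getActorUrn methods are inherited from the interface.

import com.datahub.authentication.Authentication;
import com.datahub.plugins.auth.authorization.Authorizer;

public class SimpleQueryContext implements QueryContext {
  private final Authentication authentication;
  private final Authorizer authorizer;

  public SimpleQueryContext(Authentication authentication, Authorizer authorizer) {
    this.authentication = authentication;
    this.authorizer = authorizer;
  }

  @Override
  public boolean isAuthenticated() {
    return authentication != null; // authenticated iff an Authentication was attached
  }

  @Override
  public Authentication getAuthentication() {
    return authentication;
  }

  @Override
  public Authorizer getAuthorizer() {
    return authorizer;
  }
}
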
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java
index df7f0884852d47..425c86ab0f0f65 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/RelationshipKey.java
@@ -4,7 +4,6 @@
import lombok.AllArgsConstructor;
import lombok.Data;
-
@Data
@AllArgsConstructor
public class RelationshipKey {
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java
index d51de6652bb0ac..c3ad37ddcb2018 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/TimeSeriesAspectArgs.java
@@ -10,11 +10,7 @@ public class TimeSeriesAspectArgs {
private Long count;
private TimeRange timeRange;
- public TimeSeriesAspectArgs(
- String urn,
- String aspectName,
- Long count,
- TimeRange timeRange) {
+ public TimeSeriesAspectArgs(String urn, String aspectName, Long count, TimeRange timeRange) {
this.urn = urn;
this.aspectName = aspectName;
this.count = count;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java
index 5f703f520bde46..c7302c9772c5ef 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/UsageStatsKey.java
@@ -3,7 +3,6 @@
import com.linkedin.usage.UsageTimeRange;
import lombok.Data;
-
@Data
public class UsageStatsKey {
private String resource;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java
index b0c0436ffd891a..6f81de5f04d8fc 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/VersionedAspectKey.java
@@ -8,7 +8,7 @@ public class VersionedAspectKey {
private String urn;
private Long version;
- public VersionedAspectKey(String urn, String aspectName, Long version) {
+ public VersionedAspectKey(String urn, String aspectName, Long version) {
this.urn = urn;
this.version = version;
this.aspectName = aspectName;
diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
index a78d89e59bc7bc..22ee4d4d4845c9 100644
--- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
+++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/WeaklyTypedAspectsResolver.java
@@ -1,5 +1,7 @@
package com.linkedin.datahub.graphql;
+import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
+
import com.linkedin.common.urn.Urn;
import com.linkedin.data.DataMap;
import com.linkedin.data.codec.JacksonDataCodec;
@@ -26,68 +28,84 @@
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
-import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*;
-
-
@Slf4j
@AllArgsConstructor
public class WeaklyTypedAspectsResolver implements DataFetcher<CompletableFuture<List<RawAspect>>> {
- private final EntityClient _entityClient;
- private final EntityRegistry _entityRegistry;
- private static final JacksonDataCodec CODEC = new JacksonDataCodec();
+ private final EntityClient _entityClient;
+ private final EntityRegistry _entityRegistry;
+ private static final JacksonDataCodec CODEC = new JacksonDataCodec();
- private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) {
- return !params.getAutoRenderOnly() || aspectSpec.isAutoRender();
- }
+ private boolean shouldReturnAspect(AspectSpec aspectSpec, AspectParams params) {
+ return !params.getAutoRenderOnly() || aspectSpec.isAutoRender();
+ }
- @Override
- public CompletableFuture<List<RawAspect>> get(DataFetchingEnvironment environment) throws Exception {
- return CompletableFuture.supplyAsync(() -> {
- List