diff --git a/.github/pr-labeler-config.yml b/.github/pr-labeler-config.yml index 75dcd272c21602..0689e13fa159c3 100644 --- a/.github/pr-labeler-config.yml +++ b/.github/pr-labeler-config.yml @@ -9,6 +9,7 @@ product: - 'datahub-web-react/**/*' - 'datahub-frontend/**/*' - 'datahub-graphql-core/**/*' +- 'metadata-io/**/*' docs: - 'docs/**/*' \ No newline at end of file diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index 5f96dceb5b5518..3a4b889a60caba 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -25,6 +25,8 @@ jobs: timeout-minutes: 60 steps: - uses: actions/checkout@v2 + with: + fetch-depth: 0 - name: Set up JDK 11 uses: actions/setup-java@v1 with: diff --git a/build.gradle b/build.gradle index 55a4eca1a11148..d96da3f8e3f05f 100644 --- a/build.gradle +++ b/build.gradle @@ -4,6 +4,10 @@ buildscript { ext.mavenVersion = '3.6.3' ext.springVersion = '5.3.20' ext.springBootVersion = '2.5.12' + ext.openTelemetryVersion = '1.18.0' + ext.neo4jVersion = '4.4.9' + ext.graphQLJavaVersion = '19.0' + ext.testContainersVersion = '1.17.4' apply from: './repositories.gradle' buildscript.repositories.addAll(project.repositories) dependencies { @@ -66,7 +70,8 @@ project.ext.externalDependency = [ 'elasticSearchRest': 'org.elasticsearch.client:elasticsearch-rest-high-level-client:7.9.3', 'elasticSearchTransport': 'org.elasticsearch.client:transport:7.9.3', 'findbugsAnnotations': 'com.google.code.findbugs:annotations:3.0.1', - 'graphqlJava': 'com.graphql-java:graphql-java:16.1', + 'graphqlJava': 'com.graphql-java:graphql-java:' + graphQLJavaVersion, + 'graphqlJavaScalars': 'com.graphql-java:graphql-java-extended-scalars:' + graphQLJavaVersion, 'gson': 'com.google.code.gson:gson:2.8.9', 'guice': 'com.google.inject:guice:4.2.2', 'guava': 'com.google.guava:guava:27.0.1-jre', @@ -88,6 +93,7 @@ project.ext.externalDependency = [ 'jerseyGuava': 
'org.glassfish.jersey.bundles.repackaged:jersey-guava:2.25.1', 'jettyJaas': 'org.eclipse.jetty:jetty-jaas:9.4.46.v20220331', 'jgrapht': 'org.jgrapht:jgrapht-core:1.5.1', + 'jna': 'net.java.dev.jna:jna:5.12.1', 'jsonPatch': 'com.github.java-json-tools:json-patch:1.13', 'jsonSchemaAvro': 'com.github.fge:json-schema-avro:0.1.4', 'jsonSimple': 'com.googlecode.json-simple:json-simple:1.1.1', @@ -109,10 +115,10 @@ project.ext.externalDependency = [ 'mockServer': 'org.mock-server:mockserver-netty:5.11.2', 'mockServerClient': 'org.mock-server:mockserver-client-java:5.11.2', 'mysqlConnector': 'mysql:mysql-connector-java:8.0.20', - 'neo4jHarness': 'org.neo4j.test:neo4j-harness:3.4.11', - 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:4.0.1', - 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:1.0.0', - 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:1.0.0', + 'neo4jHarness': 'org.neo4j.test:neo4j-harness:' + neo4jVersion, + 'neo4jJavaDriver': 'org.neo4j.driver:neo4j-java-driver:' + neo4jVersion, + 'opentelemetryApi': 'io.opentelemetry:opentelemetry-api:' + openTelemetryVersion, + 'opentelemetryAnnotations': 'io.opentelemetry:opentelemetry-extension-annotations:' + openTelemetryVersion, 'opentracingJdbc':'io.opentracing.contrib:opentracing-jdbc:0.2.15', 'parquet': 'org.apache.parquet:parquet-avro:1.12.2', 'picocli': 'info.picocli:picocli:4.5.0', @@ -152,11 +158,11 @@ project.ext.externalDependency = [ 'springActuator': "org.springframework.boot:spring-boot-starter-actuator:$springBootVersion", 'swaggerAnnotations': 'io.swagger.core.v3:swagger-annotations:2.1.12', 'testng': 'org.testng:testng:7.3.0', - 'testContainers': 'org.testcontainers:testcontainers:1.15.3', - 'testContainersJunit': 'org.testcontainers:junit-jupiter:1.15.3', - 'testContainersPostgresql':'org.testcontainers:postgresql:1.15.3', - 'testContainersElasticsearch': 'org.testcontainers:elasticsearch:1.15.3', - 'testContainersCassandra': 
'org.testcontainers:cassandra:1.15.3', + 'testContainers': 'org.testcontainers:testcontainers:' + testContainersVersion, + 'testContainersJunit': 'org.testcontainers:junit-jupiter:' + testContainersVersion, + 'testContainersPostgresql':'org.testcontainers:postgresql:' + testContainersVersion, + 'testContainersElasticsearch': 'org.testcontainers:elasticsearch:' + testContainersVersion, + 'testContainersCassandra': 'org.testcontainers:cassandra:' + testContainersVersion, 'typesafeConfig':'com.typesafe:config:1.4.1', 'wiremock':'com.github.tomakehurst:wiremock:2.10.0', 'zookeeper': 'org.apache.zookeeper:zookeeper:3.4.14' diff --git a/datahub-frontend/app/auth/AuthModule.java b/datahub-frontend/app/auth/AuthModule.java index 0773d95b45cf58..92d495c3edc4f0 100644 --- a/datahub-frontend/app/auth/AuthModule.java +++ b/datahub-frontend/app/auth/AuthModule.java @@ -1,8 +1,13 @@ package auth; +import auth.sso.SsoConfigs; +import auth.sso.SsoManager; +import auth.sso.oidc.OidcConfigs; +import auth.sso.oidc.OidcProvider; import client.AuthServiceClient; import com.datahub.authentication.Actor; import com.datahub.authentication.ActorType; +import com.datahub.authentication.Authentication; import com.google.inject.AbstractModule; import com.google.inject.Provides; import com.google.inject.Singleton; @@ -10,10 +15,14 @@ import com.linkedin.entity.client.RestliEntityClient; import com.linkedin.metadata.restli.DefaultRestliClientFactory; import com.linkedin.util.Configuration; -import com.datahub.authentication.Authentication; +import controllers.SsoCallbackController; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; import java.util.Collections; +import java.util.List; import org.apache.commons.codec.digest.DigestUtils; +import org.apache.http.impl.client.CloseableHttpClient; +import org.apache.http.impl.client.HttpClients; import org.pac4j.core.client.Client; import org.pac4j.core.client.Clients; import org.pac4j.core.config.Config; @@ -25,14 +34,6 @@ import 
org.pac4j.play.store.PlaySessionStore; import org.pac4j.play.store.ShiroAesDataEncrypter; import play.Environment; - -import java.util.ArrayList; -import java.util.List; -import auth.sso.oidc.OidcProvider; -import auth.sso.oidc.OidcConfigs; -import auth.sso.SsoConfigs; -import auth.sso.SsoManager; -import controllers.SsoCallbackController; import play.cache.SyncCacheApi; import utils.ConfigUtil; @@ -142,44 +143,49 @@ protected SsoManager provideSsoManager() { return manager; } - @Provides @Singleton + @Provides + @Singleton protected Authentication provideSystemAuthentication() { // Returns an instance of Authentication used to authenticate system initiated calls to Metadata Service. String systemClientId = _configs.getString(SYSTEM_CLIENT_ID_CONFIG_PATH); String systemSecret = _configs.getString(SYSTEM_CLIENT_SECRET_CONFIG_PATH); - final Actor systemActor = new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. - return new Authentication( - systemActor, - String.format("Basic %s:%s", systemClientId, systemSecret), - Collections.emptyMap() - ); + final Actor systemActor = + new Actor(ActorType.USER, systemClientId); // TODO: Change to service actor once supported. + return new Authentication(systemActor, String.format("Basic %s:%s", systemClientId, systemSecret), + Collections.emptyMap()); } - @Provides @Singleton + @Provides + @Singleton protected EntityClient provideEntityClient() { return new RestliEntityClient(buildRestliClient()); } - @Provides @Singleton - protected AuthServiceClient provideAuthClient(Authentication systemAuthentication) { + @Provides + @Singleton + protected CloseableHttpClient provideHttpClient() { + return HttpClients.createDefault(); + } + + @Provides + @Singleton + protected AuthServiceClient provideAuthClient(Authentication systemAuthentication, CloseableHttpClient httpClient) { // Init a GMS auth client - final String metadataServiceHost = _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) - ? 
_configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) - : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); - - final int metadataServicePort = _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) - ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) - : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); - - final Boolean metadataServiceUseSsl = _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) - ? _configs.getBoolean(METADATA_SERVICE_USE_SSL_CONFIG_PATH) - : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); - - return new AuthServiceClient( - metadataServiceHost, - metadataServicePort, - metadataServiceUseSsl, - systemAuthentication); + final String metadataServiceHost = + _configs.hasPath(METADATA_SERVICE_HOST_CONFIG_PATH) ? _configs.getString(METADATA_SERVICE_HOST_CONFIG_PATH) + : Configuration.getEnvironmentVariable(GMS_HOST_ENV_VAR, DEFAULT_GMS_HOST); + + final int metadataServicePort = + _configs.hasPath(METADATA_SERVICE_PORT_CONFIG_PATH) ? _configs.getInt(METADATA_SERVICE_PORT_CONFIG_PATH) + : Integer.parseInt(Configuration.getEnvironmentVariable(GMS_PORT_ENV_VAR, DEFAULT_GMS_PORT)); + + final Boolean metadataServiceUseSsl = + _configs.hasPath(METADATA_SERVICE_USE_SSL_CONFIG_PATH) ? 
_configs.getBoolean( + METADATA_SERVICE_USE_SSL_CONFIG_PATH) + : Boolean.parseBoolean(Configuration.getEnvironmentVariable(GMS_USE_SSL_ENV_VAR, DEFAULT_GMS_USE_SSL)); + + return new AuthServiceClient(metadataServiceHost, metadataServicePort, metadataServiceUseSsl, + systemAuthentication, httpClient); } private com.linkedin.restli.client.Client buildRestliClient() { diff --git a/datahub-frontend/app/client/AuthServiceClient.java b/datahub-frontend/app/client/AuthServiceClient.java index 9b6b869e4ae3fa..44ee9198684d49 100644 --- a/datahub-frontend/app/client/AuthServiceClient.java +++ b/datahub-frontend/app/client/AuthServiceClient.java @@ -12,7 +12,6 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.entity.StringEntity; import org.apache.http.impl.client.CloseableHttpClient; -import org.apache.http.impl.client.HttpClients; import org.apache.http.util.EntityUtils; import play.mvc.Http; @@ -27,6 +26,7 @@ public class AuthServiceClient { private static final String SIGN_UP_ENDPOINT = "auth/signUp"; private static final String RESET_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/resetNativeUserCredentials"; private static final String VERIFY_NATIVE_USER_CREDENTIALS_ENDPOINT = "auth/verifyNativeUserCredentials"; + private static final String TRACK_ENDPOINT = "auth/track"; private static final String ACCESS_TOKEN_FIELD = "accessToken"; private static final String USER_ID_FIELD = "userId"; private static final String USER_URN_FIELD = "userUrn"; @@ -44,13 +44,16 @@ public class AuthServiceClient { private final Integer metadataServicePort; private final Boolean metadataServiceUseSsl; private final Authentication systemAuthentication; + private final CloseableHttpClient httpClient; public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull final Integer metadataServicePort, - @Nonnull final Boolean useSsl, @Nonnull final Authentication systemAuthentication) { + @Nonnull final Boolean useSsl, @Nonnull final Authentication 
systemAuthentication, + @Nonnull final CloseableHttpClient httpClient) { this.metadataServiceHost = Objects.requireNonNull(metadataServiceHost); this.metadataServicePort = Objects.requireNonNull(metadataServicePort); this.metadataServiceUseSsl = Objects.requireNonNull(useSsl); this.systemAuthentication = Objects.requireNonNull(systemAuthentication); + this.httpClient = Objects.requireNonNull(httpClient); } /** @@ -62,7 +65,7 @@ public AuthServiceClient(@Nonnull final String metadataServiceHost, @Nonnull fin @Nonnull public String generateSessionTokenForUser(@Nonnull final String userId) { Objects.requireNonNull(userId, "userId must not be null"); - CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = null; try { @@ -81,7 +84,7 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { // Add authorization header with DataHub frontend system id and secret. request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); - CloseableHttpResponse response = httpClient.execute(request); + response = httpClient.execute(request); final HttpEntity entity = response.getEntity(); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { // Successfully generated a token for the User @@ -96,9 +99,11 @@ public String generateSessionTokenForUser(@Nonnull final String userId) { throw new RuntimeException("Failed to generate session token for user", e); } finally { try { - httpClient.close(); + if (response != null) { + response.close(); + } } catch (Exception e) { - log.warn("Failed to close http client", e); + log.error("Failed to close http response", e); } } } @@ -114,7 +119,7 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN Objects.requireNonNull(title, "title must not be null"); Objects.requireNonNull(password, "password must not be null"); Objects.requireNonNull(inviteToken, "inviteToken must not be null"); - 
CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = null; try { @@ -138,7 +143,7 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN // Add authorization header with DataHub frontend system id and secret. request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); - CloseableHttpResponse response = httpClient.execute(request); + response = httpClient.execute(request); final HttpEntity entity = response.getEntity(); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { // Successfully generated a token for the User @@ -153,9 +158,11 @@ public boolean signUp(@Nonnull final String userUrn, @Nonnull final String fullN throw new RuntimeException(String.format("Failed to create user %s", userUrn), e); } finally { try { - httpClient.close(); + if (response != null) { + response.close(); + } } catch (Exception e) { - log.warn("Failed to close http client", e); + log.error("Failed to close http response", e); } } } @@ -168,7 +175,7 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); Objects.requireNonNull(resetToken, "reset token must not be null"); - CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = null; try { @@ -189,7 +196,7 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul // Add authorization header with DataHub frontend system id and secret. 
request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); - CloseableHttpResponse response = httpClient.execute(request); + response = httpClient.execute(request); final HttpEntity entity = response.getEntity(); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { // Successfully generated a token for the User @@ -204,9 +211,11 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul throw new RuntimeException("Failed to reset credentials for user", e); } finally { try { - httpClient.close(); + if (response != null) { + response.close(); + } } catch (Exception e) { - log.warn("Failed to close http client", e); + log.error("Failed to close http response", e); } } } @@ -217,7 +226,7 @@ public boolean resetNativeUserCredentials(@Nonnull final String userUrn, @Nonnul public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnull final String password) { Objects.requireNonNull(userUrn, "userUrn must not be null"); Objects.requireNonNull(password, "password must not be null"); - CloseableHttpClient httpClient = HttpClients.createDefault(); + CloseableHttpResponse response = null; try { @@ -237,7 +246,7 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu // Add authorization header with DataHub frontend system id and secret. 
request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); - CloseableHttpResponse response = httpClient.execute(request); + response = httpClient.execute(request); final HttpEntity entity = response.getEntity(); if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK && entity != null) { // Successfully generated a token for the User @@ -252,9 +261,51 @@ public boolean verifyNativeUserCredentials(@Nonnull final String userUrn, @Nonnu throw new RuntimeException("Failed to verify credentials for user", e); } finally { try { - httpClient.close(); + if (response != null) { + response.close(); + } } catch (Exception e) { - log.warn("Failed to close http client", e); + log.error("Failed to close http response", e); + } + } + } + + /** + * Call the Auth Service to track an analytics event + */ + public void track(@Nonnull final String event) { + Objects.requireNonNull(event, "event must not be null"); + CloseableHttpResponse response = null; + + try { + final String protocol = this.metadataServiceUseSsl ? "https" : "http"; + final HttpPost request = new HttpPost( + String.format("%s://%s:%s/%s", protocol, this.metadataServiceHost, this.metadataServicePort, + TRACK_ENDPOINT)); + + // Build JSON request to track event. + request.setEntity(new StringEntity(event)); + + // Add authorization header with DataHub frontend system id and secret. 
+ request.addHeader(Http.HeaderNames.AUTHORIZATION, this.systemAuthentication.getCredentials()); + + response = httpClient.execute(request); + final HttpEntity entity = response.getEntity(); + + if (response.getStatusLine().getStatusCode() != HttpStatus.SC_OK || entity == null) { + throw new RuntimeException( + String.format("Bad response from the Metadata Service: %s %s", response.getStatusLine().toString(), + response.getEntity().toString())); + } + } catch (Exception e) { + throw new RuntimeException("Failed to track event", e); + } finally { + try { + if (response != null) { + response.close(); + } + } catch (Exception e) { + log.error("Failed to close http response", e); } } } diff --git a/datahub-frontend/app/controllers/AuthenticationController.java b/datahub-frontend/app/controllers/AuthenticationController.java index 6b49f288bd8623..5620288a48979b 100644 --- a/datahub-frontend/app/controllers/AuthenticationController.java +++ b/datahub-frontend/app/controllers/AuthenticationController.java @@ -12,8 +12,6 @@ import com.typesafe.config.Config; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; -import java.time.Duration; -import java.time.temporal.ChronoUnit; import java.util.HashMap; import java.util.Map; import java.util.Optional; @@ -141,12 +139,7 @@ public Result logIn(Http.Request request) { final Urn actorUrn = new CorpuserUrn(username); final String accessToken = _authClient.generateSessionTokenForUser(actorUrn.getId()); - Result result = ok().withSession(createSessionMap(actorUrn.toString(), accessToken)) - .withCookies(Http.Cookie.builder(ACTOR, actorUrn.toString()) - .withHttpOnly(false) - .withMaxAge(Duration.of(30, ChronoUnit.DAYS)) - .build()); - return result; + return createSession(actorUrn.toString(), accessToken); } /** @@ -200,11 +193,7 @@ public Result signUp(Http.Request request) { final String userUrnString = userUrn.toString(); _authClient.signUp(userUrnString, fullName, email, title, password, inviteToken); final 
String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies(Http.Cookie.builder(ACTOR, userUrnString) - .withHttpOnly(false) - .withMaxAge(Duration.of(30, ChronoUnit.DAYS)) - .build()); + return createSession(userUrnString, accessToken); } /** @@ -246,11 +235,7 @@ public Result resetNativeUserCredentials(Http.Request request) { final String userUrnString = userUrn.toString(); _authClient.resetNativeUserCredentials(userUrnString, password, resetToken); final String accessToken = _authClient.generateSessionTokenForUser(userUrn.getId()); - return ok().withSession(createSessionMap(userUrnString, accessToken)) - .withCookies(Http.Cookie.builder(ACTOR, userUrnString) - .withHttpOnly(false) - .withMaxAge(Duration.of(30, ChronoUnit.DAYS)) - .build()); + return createSession(userUrnString, accessToken); } private Result redirectToIdentityProvider() { @@ -309,6 +294,13 @@ private boolean tryLogin(String username, String password) { return loginSucceeded; } + private Result createSession(String userUrnString, String accessToken) { + int ttlInHours = _configs.hasPath(SESSION_TTL_CONFIG_PATH) ? 
_configs.getInt(SESSION_TTL_CONFIG_PATH) + : DEFAULT_SESSION_TTL_HOURS; + return ok().withSession(createSessionMap(userUrnString, accessToken)) + .withCookies(createActorCookie(userUrnString, ttlInHours)); + } + private Map createSessionMap(final String userUrnStr, final String accessToken) { final Map sessionAttributes = new HashMap<>(); sessionAttributes.put(ACTOR, userUrnStr); diff --git a/datahub-frontend/app/controllers/TrackingController.java b/datahub-frontend/app/controllers/TrackingController.java index 6263550c7588dd..980d2f4bcd2035 100644 --- a/datahub-frontend/app/controllers/TrackingController.java +++ b/datahub-frontend/app/controllers/TrackingController.java @@ -1,7 +1,16 @@ package controllers; +import auth.Authenticator; +import client.AuthServiceClient; import com.fasterxml.jackson.databind.JsonNode; import com.typesafe.config.Config; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Optional; +import java.util.Properties; +import javax.annotation.Nonnull; +import javax.inject.Inject; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.ProducerConfig; @@ -11,19 +20,10 @@ import org.apache.kafka.common.security.auth.SecurityProtocol; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import auth.Authenticator; -import javax.annotation.Nonnull; -import javax.inject.Inject; - import play.mvc.Controller; +import play.mvc.Http; import play.mvc.Result; import play.mvc.Security; - -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Optional; -import java.util.Properties; import utils.ConfigUtil; import static auth.AuthUtils.*; @@ -35,7 +35,7 @@ public class TrackingController extends Controller { private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName()); private static final List KAFKA_SSL_PROTOCOLS = 
Collections.unmodifiableList( - Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), + Arrays.asList(SecurityProtocol.SSL.name(), SecurityProtocol.SASL_SSL.name(), SecurityProtocol.SASL_PLAINTEXT.name())); private final Boolean _isEnabled; @@ -43,6 +43,9 @@ public class TrackingController extends Controller { private final KafkaProducer _producer; private final String _topic; + @Inject + AuthServiceClient _authClient; + @Inject public TrackingController(@Nonnull Config config) { _config = config; @@ -59,7 +62,7 @@ public TrackingController(@Nonnull Config config) { @Security.Authenticated(Authenticator.class) @Nonnull - public Result track() throws Exception { + public Result track(Http.Request request) throws Exception { if (!_isEnabled) { // If tracking is disabled, simply return a 200. return status(200); @@ -67,7 +70,7 @@ public Result track() throws Exception { JsonNode event; try { - event = request().body().asJson(); + event = request.body().asJson(); } catch (Exception e) { return badRequest(); } @@ -75,12 +78,13 @@ public Result track() throws Exception { try { _logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event)); final ProducerRecord record = new ProducerRecord<>( - _topic, - actor, - event.toString()); - _producer.send(record); - _producer.flush(); - return ok(); + _topic, + actor, + event.toString()); + _producer.send(record); + _producer.flush(); + _authClient.track(event.toString()); + return ok(); } catch (Exception e) { _logger.error(String.format("Failed to emit product analytics event. 
actor: %s, event: %s", actor, event)); return internalServerError(e.getMessage()); diff --git a/datahub-frontend/conf/routes b/datahub-frontend/conf/routes index ef6bd32559f56f..7acc922b1804a6 100644 --- a/datahub-frontend/conf/routes +++ b/datahub-frontend/conf/routes @@ -36,7 +36,7 @@ PUT /openapi/*path con GET /assets/*file controllers.Assets.at(path="/public", file) # Analytics route -POST /track controllers.TrackingController.track() +POST /track controllers.TrackingController.track(request: Request) # Wildcard route accepts any routes and delegates to serveAsset which in turn serves the React Bundle GET /*path controllers.Application.index(path) diff --git a/datahub-frontend/play.gradle b/datahub-frontend/play.gradle index 123cce7eb3cfbf..fb08cbddc1b071 100644 --- a/datahub-frontend/play.gradle +++ b/datahub-frontend/play.gradle @@ -72,7 +72,7 @@ play { platform { playVersion = '2.7.6' scalaVersion = '2.12' - javaVersion = JavaVersion.VERSION_1_8 + javaVersion = JavaVersion.VERSION_11 } injectedRoutesGenerator = true @@ -81,7 +81,7 @@ play { model { components { play { - platform play: '2.7.6', scala: '2.12', java: '1.8' + platform play: '2.7.6', scala: '2.12', java: '11' injectedRoutesGenerator = true binaries.all { diff --git a/datahub-graphql-core/build.gradle b/datahub-graphql-core/build.gradle index 62d77f03bbb5c6..aa13ce05d7d59e 100644 --- a/datahub-graphql-core/build.gradle +++ b/datahub-graphql-core/build.gradle @@ -9,7 +9,8 @@ dependencies { compile project(':metadata-io') compile project(':metadata-utils') - compile externalDependency.graphqlJava + implementation externalDependency.graphqlJava + implementation externalDependency.graphqlJavaScalars compile externalDependency.antlr4Runtime compile externalDependency.antlr4 compile externalDependency.guava diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java index 
c292c0cd3acc34..7e38de010f9acb 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/GmsGraphQLEngine.java @@ -3,6 +3,7 @@ import com.datahub.authentication.AuthenticationConfiguration; import com.datahub.authentication.group.GroupService; import com.datahub.authentication.invite.InviteTokenService; +import com.datahub.authentication.post.PostService; import com.datahub.authentication.token.StatefulTokenService; import com.datahub.authentication.user.NativeUserService; import com.datahub.authorization.AuthorizationConfiguration; @@ -175,6 +176,8 @@ import com.linkedin.datahub.graphql.resolvers.policy.GetGrantedPrivilegesResolver; import com.linkedin.datahub.graphql.resolvers.policy.ListPoliciesResolver; import com.linkedin.datahub.graphql.resolvers.policy.UpsertPolicyResolver; +import com.linkedin.datahub.graphql.resolvers.post.CreatePostResolver; +import com.linkedin.datahub.graphql.resolvers.post.ListPostsResolver; import com.linkedin.datahub.graphql.resolvers.recommendation.ListRecommendationsResolver; import com.linkedin.datahub.graphql.resolvers.role.AcceptRoleResolver; import com.linkedin.datahub.graphql.resolvers.role.BatchAssignRoleResolver; @@ -282,7 +285,7 @@ import static com.linkedin.datahub.graphql.Constants.*; import static com.linkedin.metadata.Constants.*; -import static graphql.Scalars.*; +import static graphql.scalars.ExtendedScalars.*; /** @@ -310,6 +313,7 @@ public class GmsGraphQLEngine { private final GroupService groupService; private final RoleService roleService; private final InviteTokenService inviteTokenService; + private final PostService postService; private final FeatureFlags featureFlags; @@ -386,7 +390,7 @@ public GmsGraphQLEngine(final EntityClient entityClient, final GraphClient graph final VisualConfiguration visualConfiguration, final TelemetryConfiguration telemetryConfiguration, final TestsConfiguration 
testsConfiguration, final DatahubConfiguration datahubConfiguration, final SiblingGraphService siblingGraphService, final GroupService groupService, final RoleService roleService, - final InviteTokenService inviteTokenService, final FeatureFlags featureFlags) { + final InviteTokenService inviteTokenService, final PostService postService, final FeatureFlags featureFlags) { this.entityClient = entityClient; this.graphClient = graphClient; @@ -407,6 +411,7 @@ public GmsGraphQLEngine(final EntityClient entityClient, final GraphClient graph this.groupService = groupService; this.roleService = roleService; this.inviteTokenService = inviteTokenService; + this.postService = postService; this.ingestionConfiguration = Objects.requireNonNull(ingestionConfiguration); this.authenticationConfiguration = Objects.requireNonNull(authenticationConfiguration); @@ -676,6 +681,7 @@ private void configureQueryResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("entities", getEntitiesResolver()) .dataFetcher("listRoles", new ListRolesResolver(this.entityClient)) .dataFetcher("getInviteToken", new GetInviteTokenResolver(this.inviteTokenService)) + .dataFetcher("listPosts", new ListPostsResolver(this.entityClient)) ); } @@ -761,7 +767,7 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("removeUser", new RemoveUserResolver(this.entityClient)) .dataFetcher("removeGroup", new RemoveGroupResolver(this.entityClient)) .dataFetcher("updateUserStatus", new UpdateUserStatusResolver(this.entityClient)) - .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient)) + .dataFetcher("createDomain", new CreateDomainResolver(this.entityClient, this.entityService)) .dataFetcher("deleteDomain", new DeleteDomainResolver(entityClient)) .dataFetcher("setDomain", new SetDomainResolver(this.entityClient, this.entityService)) .dataFetcher("batchSetDomain", new BatchSetDomainResolver(this.entityService)) @@ -783,8 +789,8 @@ private void 
configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("updateTest", new UpdateTestResolver(this.entityClient)) .dataFetcher("deleteTest", new DeleteTestResolver(this.entityClient)) .dataFetcher("reportOperation", new ReportOperationResolver(this.entityClient)) - .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient)) - .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient)) + .dataFetcher("createGlossaryTerm", new CreateGlossaryTermResolver(this.entityClient, this.entityService)) + .dataFetcher("createGlossaryNode", new CreateGlossaryNodeResolver(this.entityClient, this.entityService)) .dataFetcher("updateParentNode", new UpdateParentNodeResolver(entityService)) .dataFetcher("deleteGlossaryEntity", new DeleteGlossaryEntityResolver(this.entityClient, this.entityService)) @@ -798,7 +804,7 @@ private void configureMutationResolvers(final RuntimeWiring.Builder builder) { .dataFetcher("batchAssignRole", new BatchAssignRoleResolver(this.roleService)) .dataFetcher("createInviteToken", new CreateInviteTokenResolver(this.inviteTokenService)) .dataFetcher("acceptRole", new AcceptRoleResolver(this.roleService, this.inviteTokenService)) - + .dataFetcher("createPost", new CreatePostResolver(this.postService)) ); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java index f4034769082953..be7ab57eb8aa15 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/authorization/AuthorizationUtils.java @@ -107,6 +107,10 @@ public static boolean canEditGroupMembers(@Nonnull String groupUrnStr, @Nonnull groupUrnStr, orPrivilegeGroups); } + public static boolean canCreateGlobalAnnouncements(@Nonnull QueryContext 
context) { + return isAuthorized(context, Optional.empty(), PoliciesConfig.CREATE_GLOBAL_ANNOUNCEMENTS_PRIVILEGE); + } + public static boolean isAuthorized( @Nonnull QueryContext context, @Nonnull Optional resourceSpec, diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java index d7e530d9dbe4d5..15c539a608cc05 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/exception/DataHubGraphQLError.java @@ -38,6 +38,10 @@ private Map buildExtensions(DataHubGraphQLErrorCode errorCode) { return extensions; } + public int getErrorCode() { + return errorCode.getCode(); + } + @Override public String getMessage() { return message; diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java index 7ecb8548519c13..bc9af99afc190c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/ResolverUtils.java @@ -3,10 +3,13 @@ import com.datahub.authentication.Authentication; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableSet; +import com.linkedin.data.template.StringArray; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.exception.ValidationException; import com.linkedin.datahub.graphql.generated.FacetFilterInput; +import com.linkedin.datahub.graphql.generated.OrFilter; +import com.linkedin.metadata.query.filter.Condition; import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import 
com.linkedin.metadata.query.filter.Filter; @@ -81,20 +84,77 @@ public static Map buildFacetFilters(@Nullable List criterionListFromAndFilter(List andFilters) { + return andFilters != null && !andFilters.isEmpty() + ? andFilters.stream() + .map(filter -> criterionFromFilter(filter)) + .collect(Collectors.toList()) : Collections.emptyList(); + + } + + // In the case that user sends filters to be or-d together, we need to build a series of conjunctive criterion + // arrays, rather than just one for the AND case. + public static ConjunctiveCriterionArray buildConjunctiveCriterionArrayWithOr( + @Nonnull List orFilters + ) { + return new ConjunctiveCriterionArray(orFilters.stream().map(orFilter -> { + CriterionArray andCriterionForOr = new CriterionArray(criterionListFromAndFilter(orFilter.getAnd())); + return new ConjunctiveCriterion().setAnd( + andCriterionForOr + ); + } + ).collect(Collectors.toList())); + } + @Nullable - public static Filter buildFilter(@Nullable List facetFilterInputs) { - if (facetFilterInputs == null || facetFilterInputs.isEmpty()) { + public static Filter buildFilter(@Nullable List andFilters, @Nullable List orFilters) { + if ((andFilters == null || andFilters.isEmpty()) && (orFilters == null || orFilters.isEmpty())) { return null; } - return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilterInputs.stream() - .map(filter -> new Criterion().setField(getFilterField(filter.getField())).setValue(filter.getValue())) - .collect(Collectors.toList()))))); + + // Or filters are the new default. We will check them first. 
+ // If we have OR filters, we need to build a series of CriterionArrays + if (orFilters != null && !orFilters.isEmpty()) { + return new Filter().setOr(buildConjunctiveCriterionArrayWithOr(orFilters)); + } + + // If or filters are not set, someone may be using the legacy and filters + final List andCriterions = criterionListFromAndFilter(andFilters); + return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(andCriterions)))); + } + + // Translates a FacetFilterInput (graphql input class) into Criterion (our internal model) + public static Criterion criterionFromFilter(final FacetFilterInput filter) { + Criterion result = new Criterion(); + result.setField(getFilterField(filter.getField())); + if (filter.getValues() != null) { + result.setValues(new StringArray(filter.getValues())); + if (!filter.getValues().isEmpty()) { + result.setValue(filter.getValues().get(0)); + } else { + result.setValue(""); + } + } + + if (filter.getCondition() != null) { + result.setCondition(Condition.valueOf(filter.getCondition().toString())); + } else { + result.setCondition(Condition.EQUAL); + } + + if (filter.getNegated() != null) { + result.setNegated(filter.getNegated()); + } + + return result; } private static String getFilterField(final String originalField) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java index 630e6718ba0dae..fafac5df0360a5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/assertion/AssertionRunEventResolver.java @@ -9,14 +9,12 @@ import com.linkedin.datahub.graphql.generated.AssertionRunStatus; import com.linkedin.datahub.graphql.generated.FacetFilterInput; 
import com.linkedin.datahub.graphql.generated.FilterInput; -import com.linkedin.datahub.graphql.generated.SearchCondition; import com.linkedin.datahub.graphql.types.dataset.mappers.AssertionRunEventMapper; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import com.linkedin.metadata.aspect.EnvelopedAspect; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.r2.RemoteInvocationException; @@ -102,13 +100,16 @@ private Filter buildFilter(@Nullable FilterInput filtersInput, @Nullable final S } List facetFilters = new ArrayList<>(); if (status != null) { - facetFilters.add(new FacetFilterInput("status", status, ImmutableList.of(status), false, SearchCondition.EQUAL)); + FacetFilterInput filter = new FacetFilterInput(); + filter.setField("status"); + filter.setValues(ImmutableList.of(status)); + facetFilters.add(filter); } if (filtersInput != null) { facetFilters.addAll(filtersInput.getAnd()); } return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(facetFilters.stream() - .map(filter -> new Criterion().setField(filter.getField()).setValue(filter.getValue())) + .map(filter -> criterionFromFilter(filter)) .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java index b6bd4a7d89c89d..96900f7a50a831 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.auth; +import com.google.common.collect.ImmutableList; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; @@ -55,7 +56,7 @@ public CompletableFuture get(DataFetchingEnvironment envi new SortCriterion().setField(EXPIRES_AT_FIELD_NAME).setOrder(SortOrder.DESCENDING); final SearchResult searchResult = _entityClient.search(Constants.ACCESS_TOKEN_ENTITY_NAME, "", - buildFilter(filters), sortCriterion, start, count, + buildFilter(filters, Collections.emptyList()), sortCriterion, start, count, getAuthentication(environment)); final List tokens = searchResult.getEntities().stream().map(entity -> { @@ -94,6 +95,6 @@ public CompletableFuture get(DataFetchingEnvironment envi */ private boolean isListingSelfTokens(final List filters, final QueryContext context) { return AuthorizationUtils.canGeneratePersonalAccessToken(context) && filters.stream() - .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValue().equals(context.getActorUrn())); + .anyMatch(filter -> filter.getField().equals("ownerUrn") && filter.getValues().equals(ImmutableList.of(context.getActorUrn()))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java index eb53c5926af324..c025e465d9d842 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolver.java @@ -5,9 +5,13 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; 
import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateDomainInput; +import com.linkedin.datahub.graphql.generated.OwnerEntityType; +import com.linkedin.datahub.graphql.generated.OwnershipType; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.domain.DomainProperties; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; import com.linkedin.metadata.utils.EntityKeyUtils; @@ -30,6 +34,7 @@ public class CreateDomainResolver implements DataFetcher> { private final EntityClient _entityClient; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -63,7 +68,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws proposal.setAspect(GenericRecordUtils.serializeAspect(mapDomainProperties(input))); proposal.setChangeType(ChangeType.UPSERT); - return _entityClient.ingestProposal(proposal, context.getAuthentication()); + String domainUrn = _entityClient.ingestProposal(proposal, context.getAuthentication()); + OwnerUtils.addCreatorAsOwner(context, domainUrn, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, _entityService); + return domainUrn; } catch (Exception e) { log.error("Failed to create Domain with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); throw new RuntimeException(String.format("Failed to create Domain with id: %s, name: %s", input.getId(), input.getName()), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java index 04b752ff5e7d68..f32f95e9da5f7c 
100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolver.java @@ -6,9 +6,13 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.generated.OwnerEntityType; +import com.linkedin.datahub.graphql.generated.OwnershipType; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.utils.EntityKeyUtils; @@ -30,6 +34,7 @@ public class CreateGlossaryNodeResolver implements DataFetcher> { private final EntityClient _entityClient; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -56,7 +61,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws proposal.setAspect(GenericRecordUtils.serializeAspect(mapGlossaryNodeInfo(input))); proposal.setChangeType(ChangeType.UPSERT); - return _entityClient.ingestProposal(proposal, context.getAuthentication()); + String glossaryNodeUrn = _entityClient.ingestProposal(proposal, context.getAuthentication()); + OwnerUtils.addCreatorAsOwner(context, glossaryNodeUrn, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, _entityService); + return glossaryNodeUrn; } catch (Exception e) { log.error("Failed to create GlossaryNode with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); throw new 
RuntimeException(String.format("Failed to create GlossaryNode with id: %s, name: %s", input.getId(), input.getName()), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java index e40d159f99969e..191f99e8aa00e0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolver.java @@ -6,9 +6,13 @@ import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateGlossaryEntityInput; +import com.linkedin.datahub.graphql.generated.OwnerEntityType; +import com.linkedin.datahub.graphql.generated.OwnershipType; +import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; import com.linkedin.glossary.GlossaryTermInfo; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.utils.EntityKeyUtils; @@ -30,6 +34,7 @@ public class CreateGlossaryTermResolver implements DataFetcher> { private final EntityClient _entityClient; + private final EntityService _entityService; @Override public CompletableFuture get(DataFetchingEnvironment environment) throws Exception { @@ -56,7 +61,9 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws proposal.setAspect(GenericRecordUtils.serializeAspect(mapGlossaryTermInfo(input))); proposal.setChangeType(ChangeType.UPSERT); - return _entityClient.ingestProposal(proposal, context.getAuthentication()); + String 
glossaryTermUrn = _entityClient.ingestProposal(proposal, context.getAuthentication()); + OwnerUtils.addCreatorAsOwner(context, glossaryTermUrn, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, _entityService); + return glossaryTermUrn; } catch (Exception e) { log.error("Failed to create GlossaryTerm with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); throw new RuntimeException(String.format("Failed to create GlossaryTerm with id: %s, name: %s", input.getId(), input.getName()), e); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java index 86d9c233a2c994..2bae315d0a0a97 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/load/TimeSeriesAspectResolver.java @@ -12,7 +12,6 @@ import com.linkedin.metadata.authorization.PoliciesConfig; import com.linkedin.metadata.query.filter.ConjunctiveCriterion; import com.linkedin.metadata.query.filter.ConjunctiveCriterionArray; -import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.query.filter.Filter; import com.linkedin.r2.RemoteInvocationException; @@ -111,7 +110,7 @@ private Filter buildFilters(@Nullable FilterInput maybeFilters) { return null; } return new Filter().setOr(new ConjunctiveCriterionArray(new ConjunctiveCriterion().setAnd(new CriterionArray(maybeFilters.getAnd().stream() - .map(filter -> new Criterion().setField(filter.getField()).setValue(filter.getValue())) + .map(filter -> criterionFromFilter(filter)) .collect(Collectors.toList()))))); } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java index ab7f645887f903..3cc64aed7f5621 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/MutationUtils.java @@ -30,7 +30,7 @@ public static void persistAspect(Urn urn, String aspectName, RecordTemplate aspe proposal.setAspectName(aspectName); proposal.setAspect(GenericRecordUtils.serializeAspect(aspect)); proposal.setChangeType(ChangeType.UPSERT); - entityService.ingestProposal(proposal, getAuditStamp(actor)); + entityService.ingestProposal(proposal, getAuditStamp(actor), false); } public static MetadataChangeProposal buildMetadataChangeProposal(Urn urn, String aspectName, RecordTemplate aspect, Urn actor, EntityService entityService) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java index 6529a3a66bfa84..86d404c35ae7bc 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateNameResolver.java @@ -10,6 +10,7 @@ import com.linkedin.domain.DomainProperties; import com.linkedin.glossary.GlossaryTermInfo; import com.linkedin.glossary.GlossaryNodeInfo; +import com.linkedin.identity.CorpGroupInfo; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; import graphql.schema.DataFetcher; @@ -47,6 +48,8 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw return updateGlossaryNodeName(targetUrn, input, environment.getContext()); case Constants.DOMAIN_ENTITY_NAME: return updateDomainName(targetUrn, input, environment.getContext()); + case 
Constants.CORP_GROUP_ENTITY_NAME: + return updateGroupName(targetUrn, input, environment.getContext()); default: throw new RuntimeException( String.format("Failed to update name. Unsupported resource type %s provided.", targetUrn)); @@ -125,4 +128,28 @@ private Boolean updateDomainName( } throw new AuthorizationException("Unauthorized to perform this action. Please contact your DataHub administrator."); } + + private Boolean updateGroupName( + Urn targetUrn, + UpdateNameInput input, + QueryContext context + ) { + if (AuthorizationUtils.canManageUsersAndGroups(context)) { + try { + CorpGroupInfo corpGroupInfo = (CorpGroupInfo) getAspectFromEntity( + targetUrn.toString(), Constants.CORP_GROUP_INFO_ASPECT_NAME, _entityService, null); + if (corpGroupInfo == null) { + throw new IllegalArgumentException("Group does not exist"); + } + corpGroupInfo.setDisplayName(input.getName()); + Urn actor = CorpuserUrn.createFromString(context.getActorUrn()); + persistAspect(targetUrn, Constants.CORP_GROUP_INFO_ASPECT_NAME, corpGroupInfo, actor, _entityService); + + return true; + } catch (Exception e) { + throw new RuntimeException(String.format("Failed to perform update against input %s", input), e); + } + } + throw new AuthorizationException("Unauthorized to perform this action. 
Please contact your DataHub administrator."); + } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java index 47678973a515db..86a8415da3d39a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolver.java @@ -51,7 +51,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throw MetadataChangeProposal proposal = buildMetadataChangeProposal(actor, CORP_USER_SETTINGS_ASPECT_NAME, newSettings, actor, _entityService); - _entityService.ingestProposal(proposal, getAuditStamp(actor)); + _entityService.ingestProposal(proposal, getAuditStamp(actor), false); return true; } catch (Exception e) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java index 75c54277e2b9d7..1828b6eb83f637 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeleteUtils.java @@ -74,7 +74,7 @@ private static MetadataChangeProposal buildSoftDeleteProposal( private static void ingestChangeProposals(List changes, EntityService entityService, Urn actor) { // TODO: Replace this with a batch ingest proposals endpoint. 
for (MetadataChangeProposal change : changes) { - entityService.ingestProposal(change, getAuditStamp(actor)); + entityService.ingestProposal(change, getAuditStamp(actor), false); } } } \ No newline at end of file diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java index 48af0b401084e4..3a12dd8b6eb75c 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DeprecationUtils.java @@ -89,7 +89,7 @@ private static MetadataChangeProposal buildUpdateDeprecationProposal( private static void ingestChangeProposals(List changes, EntityService entityService, Urn actor) { // TODO: Replace this with a batch ingest proposals endpoint. for (MetadataChangeProposal change : changes) { - entityService.ingestProposal(change, getAuditStamp(actor)); + entityService.ingestProposal(change, getAuditStamp(actor), false); } } } \ No newline at end of file diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java index addd0bbd2b9f10..e0e964b02fa23a 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/DomainUtils.java @@ -88,7 +88,7 @@ public static void validateDomain(Urn domainUrn, EntityService entityService) { private static void ingestChangeProposals(List changes, EntityService entityService, Urn actor) { // TODO: Replace this with a batch ingest proposals endpoint. 
for (MetadataChangeProposal change : changes) { - entityService.ingestProposal(change, getAuditStamp(actor)); + entityService.ingestProposal(change, getAuditStamp(actor), false); } } } \ No newline at end of file diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java index 62fe5531ffb6dc..7f9c44e29e2ab0 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/LabelUtils.java @@ -556,7 +556,7 @@ private static GlossaryTermAssociationArray removeTermsIfExists(GlossaryTerms te private static void ingestChangeProposals(List changes, EntityService entityService, Urn actor) { // TODO: Replace this with a batch ingest proposals endpoint. for (MetadataChangeProposal change : changes) { - entityService.ingestProposal(change, getAuditStamp(actor)); + entityService.ingestProposal(change, getAuditStamp(actor), false); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java index 4c13367f5da0ed..497596d1d778c9 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/mutate/util/OwnerUtils.java @@ -1,6 +1,7 @@ package com.linkedin.datahub.graphql.resolvers.mutate.util; import com.google.common.collect.ImmutableList; +import com.linkedin.common.urn.CorpuserUrn; import com.linkedin.common.Owner; import com.linkedin.common.OwnerArray; @@ -34,6 +35,7 @@ // TODO: Move to consuming from OwnerService @Slf4j public class OwnerUtils { + private static final 
ConjunctivePrivilegeGroup ALL_PRIVILEGES_GROUP = new ConjunctivePrivilegeGroup(ImmutableList.of( PoliciesConfig.EDIT_ENTITY_PRIVILEGE.getType() )); @@ -215,7 +217,26 @@ public static Boolean validateRemoveInput( private static void ingestChangeProposals(List changes, EntityService entityService, Urn actor) { // TODO: Replace this with a batch ingest proposals endpoint. for (MetadataChangeProposal change : changes) { - entityService.ingestProposal(change, getAuditStamp(actor)); + entityService.ingestProposal(change, getAuditStamp(actor), false); + } + } + + public static void addCreatorAsOwner( + QueryContext context, + String urn, + OwnerEntityType ownerEntityType, + com.linkedin.datahub.graphql.generated.OwnershipType ownershipType, + EntityService entityService) { + try { + Urn actorUrn = CorpuserUrn.createFromString(context.getActorUrn()); + addOwnersToResources( + ImmutableList.of(new OwnerInput(actorUrn.toString(), ownerEntityType, ownershipType)), + ImmutableList.of(new ResourceRefInput(urn, null, null)), + actorUrn, + entityService + ); + } catch (Exception e) { + log.error(String.format("Failed to add creator as owner of tag %s", urn), e); } } } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java new file mode 100644 index 00000000000000..524caf14e9afe4 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolver.java @@ -0,0 +1,60 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import com.datahub.authentication.Authentication; +import com.datahub.authentication.post.PostService; +import com.linkedin.common.Media; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; +import com.linkedin.datahub.graphql.exception.AuthorizationException; +import 
com.linkedin.datahub.graphql.generated.CreatePostInput; +import com.linkedin.datahub.graphql.generated.PostContentType; +import com.linkedin.datahub.graphql.generated.PostType; +import com.linkedin.datahub.graphql.generated.UpdateMediaInput; +import com.linkedin.datahub.graphql.generated.UpdatePostContentInput; +import com.linkedin.post.PostContent; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.concurrent.CompletableFuture; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; + + +@Slf4j +@RequiredArgsConstructor +public class CreatePostResolver implements DataFetcher> { + private final PostService _postService; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + + if (!AuthorizationUtils.canCreateGlobalAnnouncements(context)) { + throw new AuthorizationException( + "Unauthorized to create posts. Please contact your DataHub administrator if this needs corrective action."); + } + + final CreatePostInput input = bindArgument(environment.getArgument("input"), CreatePostInput.class); + final PostType type = input.getPostType(); + final UpdatePostContentInput content = input.getContent(); + final PostContentType contentType = content.getContentType(); + final String title = content.getTitle(); + final String link = content.getLink(); + final String description = content.getDescription(); + final UpdateMediaInput updateMediaInput = content.getMedia(); + final Authentication authentication = context.getAuthentication(); + + Media media = updateMediaInput == null ? 
null + : _postService.mapMedia(updateMediaInput.getType().toString(), updateMediaInput.getLocation()); + PostContent postContent = _postService.mapPostContent(contentType.toString(), title, description, link, media); + + return CompletableFuture.supplyAsync(() -> { + try { + return _postService.createPost(type.toString(), postContent, authentication); + } catch (Exception e) { + throw new RuntimeException("Failed to create a new post", e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java new file mode 100644 index 00000000000000..839c5b5d1add1a --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolver.java @@ -0,0 +1,72 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import com.datahub.authentication.Authentication; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.ListPostsInput; +import com.linkedin.datahub.graphql.generated.ListPostsResult; +import com.linkedin.datahub.graphql.types.post.PostMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.query.filter.SortCriterion; +import com.linkedin.metadata.query.filter.SortOrder; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchResult; +import graphql.schema.DataFetcher; +import graphql.schema.DataFetchingEnvironment; +import java.util.HashSet; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; +import lombok.RequiredArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; +import static com.linkedin.metadata.Constants.*; + + +@Slf4j 
+@RequiredArgsConstructor +public class ListPostsResolver implements DataFetcher> { + private static final Integer DEFAULT_START = 0; + private static final Integer DEFAULT_COUNT = 20; + private static final String DEFAULT_QUERY = ""; + + private final EntityClient _entityClient; + + @Override + public CompletableFuture get(final DataFetchingEnvironment environment) throws Exception { + final QueryContext context = environment.getContext(); + final Authentication authentication = context.getAuthentication(); + + final ListPostsInput input = bindArgument(environment.getArgument("input"), ListPostsInput.class); + final Integer start = input.getStart() == null ? DEFAULT_START : input.getStart(); + final Integer count = input.getCount() == null ? DEFAULT_COUNT : input.getCount(); + final String query = input.getQuery() == null ? DEFAULT_QUERY : input.getQuery(); + + return CompletableFuture.supplyAsync(() -> { + try { + final SortCriterion sortCriterion = + new SortCriterion().setField(LAST_MODIFIED_FIELD_NAME).setOrder(SortOrder.DESCENDING); + + // First, get all Post Urns. + final SearchResult gmsResult = _entityClient.search(POST_ENTITY_NAME, query, null, sortCriterion, start, count, + context.getAuthentication()); + + // Then, get and hydrate all Posts. 
+ final Map entities = _entityClient.batchGetV2(POST_ENTITY_NAME, + new HashSet<>(gmsResult.getEntities().stream().map(SearchEntity::getEntity).collect(Collectors.toList())), + null, authentication); + + final ListPostsResult result = new ListPostsResult(); + result.setStart(gmsResult.getFrom()); + result.setCount(gmsResult.getPageSize()); + result.setTotal(gmsResult.getNumEntities()); + result.setPosts(entities.values().stream().map(PostMapper::map).collect(Collectors.toList())); + return result; + } catch (Exception e) { + throw new RuntimeException("Failed to list posts", e); + } + }); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java index fc0f1732b56324..f71a7143aa6eb2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/recommendation/ListRecommendationsResolver.java @@ -1,5 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.recommendation; +import com.google.common.collect.ImmutableList; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.generated.ContentParams; import com.linkedin.datahub.graphql.generated.EntityProfileParams; @@ -14,7 +15,6 @@ import com.linkedin.datahub.graphql.generated.SearchParams; import com.linkedin.datahub.graphql.resolvers.EntityTypeMapper; import com.linkedin.datahub.graphql.types.common.mappers.UrnToEntityMapper; -import com.linkedin.metadata.query.filter.Criterion; import com.linkedin.metadata.query.filter.CriterionArray; import com.linkedin.metadata.recommendation.EntityRequestContext; import com.linkedin.metadata.recommendation.RecommendationsService; @@ -31,7 +31,7 @@ import lombok.RequiredArgsConstructor; import 
lombok.extern.slf4j.Slf4j; -import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.bindArgument; +import static com.linkedin.datahub.graphql.resolvers.ResolverUtils.*; @Slf4j @@ -88,7 +88,7 @@ private com.linkedin.metadata.recommendation.RecommendationRequestContext mapReq searchRequestContext.setFilters(new CriterionArray(requestContext.getSearchRequestContext() .getFilters() .stream() - .map(facetField -> new Criterion().setField(facetField.getField()).setValue(facetField.getValue())) + .map(facetField -> criterionFromFilter(facetField)) .collect(Collectors.toList()))); } mappedRequestContext.setSearchRequestContext(searchRequestContext); @@ -148,7 +148,8 @@ private RecommendationParams mapRecommendationParams( searchParams.setFilters(params.getSearchParams() .getFilters() .stream() - .map(criterion -> Filter.builder().setField(criterion.getField()).setValue(criterion.getValue()).build()) + .map(criterion -> Filter.builder().setField(criterion.getField()).setValues( + ImmutableList.of(criterion.getValue())).build()) .collect(Collectors.toList())); } mappedParams.setSearchParams(searchParams); diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java index adbac977819be9..20a6738c2abca5 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossEntitiesResolver.java @@ -52,7 +52,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) "Executing search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", input.getTypes(), input.getQuery(), input.getFilters(), start, count); return UrnSearchResultsMapper.map(_entityClient.searchAcrossEntities(entityNames, 
sanitizedQuery, - ResolverUtils.buildFilter(input.getFilters()), start, count, ResolverUtils.getAuthentication(environment))); + ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), start, count, ResolverUtils.getAuthentication(environment))); } catch (Exception e) { log.error( "Failed to execute search for multiple entities: entity types {}, query {}, filters: {}, start: {}, count: {}", diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java index e4485e25439bdd..f63bfd31e01154 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchAcrossLineageResolver.java @@ -72,7 +72,7 @@ public CompletableFuture get(DataFetchingEnvironment urn, resolvedDirection, input.getTypes(), input.getQuery(), filters, start, count); return UrnSearchAcrossLineageResultsMapper.map( _entityClient.searchAcrossLineage(urn, resolvedDirection, entityNames, sanitizedQuery, - maxHops, ResolverUtils.buildFilter(filters), null, start, count, + maxHops, ResolverUtils.buildFilter(filters, input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment))); } catch (RemoteInvocationException e) { log.error( @@ -89,7 +89,7 @@ public CompletableFuture get(DataFetchingEnvironment private Integer getMaxHops(List filters) { Set degreeFilterValues = filters.stream() .filter(filter -> filter.getField().equals("degree")) - .map(FacetFilterInput::getValue) + .flatMap(filter -> filter.getValues().stream()) .collect(Collectors.toSet()); Integer maxHops = null; if (!degreeFilterValues.contains("3+")) { diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java index 1f3df870e45a38..4db2ee957b5bc2 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/search/SearchResolver.java @@ -41,17 +41,17 @@ public CompletableFuture get(DataFetchingEnvironment environment) return CompletableFuture.supplyAsync(() -> { try { - log.debug("Executing search. entity type {}, query {}, filters: {}, start: {}, count: {}", input.getType(), - input.getQuery(), input.getFilters(), start, count); + log.debug("Executing search. entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}", input.getType(), + input.getQuery(), input.getFilters(), input.getOrFilters(), start, count); return UrnSearchResultsMapper.map( - _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters()), null, start, + _entityClient.search(entityName, sanitizedQuery, ResolverUtils.buildFilter(input.getFilters(), input.getOrFilters()), null, start, count, ResolverUtils.getAuthentication(environment))); } catch (Exception e) { - log.error("Failed to execute search: entity type {}, query {}, filters: {}, start: {}, count: {}", - input.getType(), input.getQuery(), input.getFilters(), start, count); + log.error("Failed to execute search: entity type {}, query {}, filters: {}, orFilters: {}, start: {}, count: {}", + input.getType(), input.getQuery(), input.getFilters(), input.getOrFilters(), start, count); throw new RuntimeException( - "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, start: %s, count: %s", - input.getType(), input.getQuery(), input.getFilters(), start, count), e); + "Failed to execute search: " + String.format("entity type %s, query %s, filters: %s, orFilters: %s, start: %s, count: %s", + input.getType(), input.getQuery(), 
input.getFilters(), input.getOrFilters(), start, count), e); } }); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java index 273248540f236f..afb349d688670d 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/resolvers/tag/CreateTagResolver.java @@ -1,17 +1,12 @@ package com.linkedin.datahub.graphql.resolvers.tag; -import com.google.common.collect.ImmutableList; -import com.linkedin.common.urn.CorpuserUrn; -import com.linkedin.common.urn.Urn; import com.linkedin.data.template.SetMode; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.authorization.AuthorizationUtils; import com.linkedin.datahub.graphql.exception.AuthorizationException; import com.linkedin.datahub.graphql.generated.CreateTagInput; import com.linkedin.datahub.graphql.generated.OwnerEntityType; -import com.linkedin.datahub.graphql.generated.OwnerInput; import com.linkedin.datahub.graphql.generated.OwnershipType; -import com.linkedin.datahub.graphql.generated.ResourceRefInput; import com.linkedin.datahub.graphql.resolvers.mutate.util.OwnerUtils; import com.linkedin.entity.client.EntityClient; import com.linkedin.events.metadata.ChangeType; @@ -74,7 +69,7 @@ public CompletableFuture get(DataFetchingEnvironment environment) throws proposal.setChangeType(ChangeType.UPSERT); String tagUrn = _entityClient.ingestProposal(proposal, context.getAuthentication()); - addCreatorAsOwner(context, tagUrn); + OwnerUtils.addCreatorAsOwner(context, tagUrn, OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER, _entityService); return tagUrn; } catch (Exception e) { log.error("Failed to create Tag with id: {}, name: {}: {}", input.getId(), input.getName(), e.getMessage()); @@ -89,18 +84,4 
@@ private TagProperties mapTagProperties(final CreateTagInput input) { result.setDescription(input.getDescription(), SetMode.IGNORE_NULL); return result; } - - private void addCreatorAsOwner(QueryContext context, String tagUrn) { - try { - Urn actorUrn = CorpuserUrn.createFromString(context.getActorUrn()); - OwnerUtils.addOwnersToResources( - ImmutableList.of(new OwnerInput(actorUrn.toString(), OwnerEntityType.CORP_USER, OwnershipType.TECHNICAL_OWNER)), - ImmutableList.of(new ResourceRefInput(tagUrn, null, null)), - actorUrn, - _entityService - ); - } catch (Exception e) { - log.error(String.format("Failed to add creator as owner of tag %s", tagUrn), e); - } - } } \ No newline at end of file diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java index 87be15ef21f172..1c041db9f8b5df 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/chart/ChartType.java @@ -203,7 +203,7 @@ public Chart update(@Nonnull String urn, @Nonnull ChartUpdateInput input, @Nonnu proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java new file mode 100644 index 00000000000000..40f0ca90b0d9f9 --- /dev/null +++ 
b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/common/mappers/UpstreamLineagesMapper.java @@ -0,0 +1,53 @@ +package com.linkedin.datahub.graphql.types.common.mappers; + +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.generated.SchemaFieldRef; +import com.linkedin.dataset.FineGrainedLineage; +import java.util.ArrayList; +import java.util.List; +import java.util.stream.Collectors; +import javax.annotation.Nonnull; + + +/** + * Maps Pegasus {@link RecordTemplate} objects to objects conforming to the GQL schema. + * + * To be replaced by auto-generated mappers implementations + */ +public class UpstreamLineagesMapper { + + public static final UpstreamLineagesMapper INSTANCE = new UpstreamLineagesMapper(); + + public static List map(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + return INSTANCE.apply(upstreamLineage); + } + + public List apply(@Nonnull final com.linkedin.dataset.UpstreamLineage upstreamLineage) { + final List result = new ArrayList<>(); + if (!upstreamLineage.hasFineGrainedLineages()) { + return result; + } + + for (FineGrainedLineage fineGrainedLineage : upstreamLineage.getFineGrainedLineages()) { + com.linkedin.datahub.graphql.generated.FineGrainedLineage resultEntry = new com.linkedin.datahub.graphql.generated.FineGrainedLineage(); + if (fineGrainedLineage.hasUpstreams()) { + resultEntry.setUpstreams(fineGrainedLineage.getUpstreams().stream() + .filter(entry -> entry.getEntityType().equals("schemaField")) + .map(entry -> mapDatasetSchemaField(entry)).collect( + Collectors.toList())); + } + if (fineGrainedLineage.hasDownstreams()) { + resultEntry.setDownstreams(fineGrainedLineage.getDownstreams().stream() + .filter(entry -> entry.getEntityType().equals("schemaField")) + .map(entry -> mapDatasetSchemaField(entry)).collect( + Collectors.toList())); + } + result.add(resultEntry); + } + return result; + } + + private static SchemaFieldRef mapDatasetSchemaField(final Urn 
schemaFieldUrn) { + return new SchemaFieldRef(schemaFieldUrn.getEntityKey().get(0), schemaFieldUrn.getEntityKey().get(1)); + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java index 62566043adef86..c4ef925e0d71c8 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dashboard/DashboardType.java @@ -192,7 +192,7 @@ public Dashboard update(@Nonnull String urn, @Nonnull DashboardUpdateInput input proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java index 57e03f16061c88..92c3fe90685824 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataflow/DataFlowType.java @@ -179,7 +179,7 @@ public DataFlow update(@Nonnull String urn, @Nonnull DataFlowUpdateInput input, proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", 
urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java index 8a5d74faf93497..1200eecec7d532 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/datajob/DataJobType.java @@ -179,7 +179,7 @@ public DataJob update(@Nonnull String urn, @Nonnull DataJobUpdateInput input, @N proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java index 87b96f91aeda6e..6bb47da3a4dc1f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/DatasetType.java @@ -208,7 +208,7 @@ public List batchUpdate(@Nonnull BatchDatasetUpdateInput[] input, @Nonn final List urns = Arrays.stream(input).map(BatchDatasetUpdateInput::getUrn).collect(Collectors.toList()); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urns), e); } @@ -224,7 +224,7 @@ public Dataset update(@Nonnull String urn, @Nonnull DatasetUpdateInput input, @N 
proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java index e6677a688e78f3..4a36296a22e5d4 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/dataset/mappers/DatasetMapper.java @@ -23,6 +23,7 @@ import com.linkedin.datahub.graphql.types.common.mappers.SiblingsMapper; import com.linkedin.datahub.graphql.types.common.mappers.StatusMapper; import com.linkedin.datahub.graphql.types.common.mappers.CustomPropertiesMapper; +import com.linkedin.datahub.graphql.types.common.mappers.UpstreamLineagesMapper; import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; import com.linkedin.datahub.graphql.types.common.mappers.util.SystemMetadataUtils; import com.linkedin.datahub.graphql.types.domain.DomainAssociationMapper; @@ -32,6 +33,7 @@ import com.linkedin.dataset.DatasetDeprecation; import com.linkedin.dataset.DatasetProperties; import com.linkedin.dataset.EditableDatasetProperties; +import com.linkedin.dataset.UpstreamLineage; import com.linkedin.dataset.ViewProperties; import com.linkedin.domain.Domains; import com.linkedin.entity.EntityResponse; @@ -97,6 +99,8 @@ public Dataset apply(@Nonnull final EntityResponse entityResponse) { dataset.setDataPlatformInstance(DataPlatformInstanceAspectMapper.map(new DataPlatformInstance(dataMap)))); 
mappingHelper.mapToResult(SIBLINGS_ASPECT_NAME, (dataset, dataMap) -> dataset.setSiblings(SiblingsMapper.map(new Siblings(dataMap)))); + mappingHelper.mapToResult(UPSTREAM_LINEAGE_ASPECT_NAME, (dataset, dataMap) -> + dataset.setFineGrainedLineages(UpstreamLineagesMapper.map(new UpstreamLineage(dataMap)))); return mappingHelper.getResult(); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java index ba715d990e8b0c..a841439c3b926f 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/notebook/NotebookType.java @@ -175,7 +175,7 @@ public Notebook update(@Nonnull String urn, @Nonnull NotebookUpdateInput input, proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java new file mode 100644 index 00000000000000..791197c7d47e49 --- /dev/null +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/post/PostMapper.java @@ -0,0 +1,76 @@ +package com.linkedin.datahub.graphql.types.post; + +import com.linkedin.data.DataMap; +import com.linkedin.datahub.graphql.generated.AuditStamp; +import com.linkedin.datahub.graphql.generated.EntityType; +import com.linkedin.datahub.graphql.generated.Media; +import com.linkedin.datahub.graphql.generated.MediaType; +import 
com.linkedin.datahub.graphql.generated.Post; +import com.linkedin.datahub.graphql.generated.PostContent; +import com.linkedin.datahub.graphql.generated.PostContentType; +import com.linkedin.datahub.graphql.generated.PostType; +import com.linkedin.datahub.graphql.types.common.mappers.util.MappingHelper; +import com.linkedin.datahub.graphql.types.mappers.ModelMapper; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.post.PostInfo; +import javax.annotation.Nonnull; + +import static com.linkedin.metadata.Constants.*; + + +public class PostMapper implements ModelMapper { + + public static final PostMapper INSTANCE = new PostMapper(); + + public static Post map(@Nonnull final EntityResponse entityResponse) { + return INSTANCE.apply(entityResponse); + } + + @Override + public Post apply(@Nonnull final EntityResponse entityResponse) { + final Post result = new Post(); + + result.setUrn(entityResponse.getUrn().toString()); + result.setType(EntityType.POST); + EnvelopedAspectMap aspectMap = entityResponse.getAspects(); + MappingHelper mappingHelper = new MappingHelper<>(aspectMap, result); + mappingHelper.mapToResult(POST_INFO_ASPECT_NAME, this::mapPostInfo); + return mappingHelper.getResult(); + } + + private void mapPostInfo(@Nonnull Post post, @Nonnull DataMap dataMap) { + PostInfo postInfo = new PostInfo(dataMap); + post.setPostType(PostType.valueOf(postInfo.getType().toString())); + post.setContent(mapPostContent(postInfo.getContent())); + AuditStamp lastModified = new AuditStamp(); + lastModified.setTime(postInfo.getLastModified()); + post.setLastModified(lastModified); + } + + @Nonnull + private com.linkedin.datahub.graphql.generated.PostContent mapPostContent( + @Nonnull com.linkedin.post.PostContent postContent) { + PostContent result = new PostContent(); + result.setContentType(PostContentType.valueOf(postContent.getType().toString())); + result.setTitle(postContent.getTitle()); + if 
(postContent.hasDescription()) { + result.setDescription(postContent.getDescription()); + } + if (postContent.hasLink()) { + result.setLink(postContent.getLink().toString()); + } + if (postContent.hasMedia()) { + result.setMedia(mapPostMedia(postContent.getMedia())); + } + return result; + } + + @Nonnull + private Media mapPostMedia(@Nonnull com.linkedin.common.Media postMedia) { + Media result = new Media(); + result.setType(MediaType.valueOf(postMedia.getType().toString())); + result.setLocation(postMedia.getLocation().toString()); + return result; + } +} diff --git a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java index 41ae275f4242bf..9aace619650066 100644 --- a/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java +++ b/datahub-graphql-core/src/main/java/com/linkedin/datahub/graphql/types/tag/TagType.java @@ -132,7 +132,7 @@ public Tag update(@Nonnull String urn, @Nonnull TagUpdateInput input, @Nonnull Q final Collection proposals = TagUpdateInputMapper.map(input, actor); proposals.forEach(proposal -> proposal.setEntityUrn(UrnUtils.getUrn(urn))); try { - _entityClient.batchIngestProposals(proposals, context.getAuthentication()); + _entityClient.batchIngestProposals(proposals, context.getAuthentication(), false); } catch (RemoteInvocationException e) { throw new RuntimeException(String.format("Failed to write entity with urn %s", urn), e); } diff --git a/datahub-graphql-core/src/main/resources/entity.graphql b/datahub-graphql-core/src/main/resources/entity.graphql index 58613e0b92f743..95216efba1cf46 100644 --- a/datahub-graphql-core/src/main/resources/entity.graphql +++ b/datahub-graphql-core/src/main/resources/entity.graphql @@ -183,6 +183,11 @@ type Query { Get invite token """ getInviteToken(input: GetInviteTokenInput!): InviteToken + + """ + List all Posts + """ + listPosts(input: ListPostsInput!): 
ListPostsResult } """ @@ -518,6 +523,11 @@ type Mutation { Create invite token """ createInviteToken(input: CreateInviteTokenInput!): InviteToken + + """ + Create a post + """ + createPost(input: CreatePostInput!): Boolean } """ @@ -693,6 +703,16 @@ enum EntityType { A DataHub Role """ DATAHUB_ROLE + + """ + A DataHub Post + """ + POST + + """ + A Schema Field + """ + SCHEMA_FIELD } """ @@ -1138,6 +1158,16 @@ type Dataset implements EntityWithRelationships & Entity & BrowsableEntity { Metadata about the datasets siblings """ siblings: SiblingProperties + + """ + fine grained lineage + """ + fineGrainedLineages: [FineGrainedLineage!] +} + +type FineGrainedLineage { + upstreams: [SchemaFieldRef!] + downstreams: [SchemaFieldRef!] } """ @@ -9385,3 +9415,223 @@ input AcceptRoleInput { """ inviteToken: String! } + +""" +The type of post +""" +enum PostType { + """ + Posts on the home page + """ + HOME_PAGE_ANNOUNCEMENT, +} + +""" +The type of post +""" +enum PostContentType { + """ + Text content + """ + TEXT, + + """ + Link content + """ + LINK +} + +""" +The type of media +""" +enum MediaType { + """ + An image + """ + IMAGE +} + +""" +Input provided when creating a Post +""" +input CreatePostInput { + """ + The type of post + """ + postType: PostType! + + """ + The content of the post + """ + content: UpdatePostContentInput! +} + +""" +Input provided for filling in a post content +""" +input UpdatePostContentInput { + """ + The type of post content + """ + contentType: PostContentType! + + """ + The title of the post + """ + title: String! + + """ + Optional content of the post + """ + description: String + + """ + Optional link that the post is associated with + """ + link: String + + """ + Optional media contained in the post + """ + media: UpdateMediaInput +} + +""" +Input provided for filling in a post content +""" +input UpdateMediaInput { + """ + The type of media + """ + type: MediaType! + + """ + The location of the media (a URL) + """ + location: String! 
+} + +""" +Input provided when listing existing posts +""" +input ListPostsInput { + """ + The starting offset of the result set returned + """ + start: Int + + """ + The maximum number of Roles to be returned in the result set + """ + count: Int + + """ + Optional search query + """ + query: String +} + +""" +The result obtained when listing Posts +""" +type ListPostsResult { + """ + The starting offset of the result set returned + """ + start: Int! + + """ + The number of Roles in the returned result set + """ + count: Int! + + """ + The total number of Roles in the result set + """ + total: Int! + + """ + The Posts themselves + """ + posts: [Post!]! +} + +""" +Input provided when creating a Post +""" +type Post implements Entity { + """ + The primary key of the Post + """ + urn: String! + + """ + The standard Entity Type + """ + type: EntityType! + + """ + Granular API for querying edges extending from the Post + """ + relationships(input: RelationshipsInput!): EntityRelationshipsResult + + """ + The type of post + """ + postType: PostType! + + """ + The content of the post + """ + content: PostContent! + + """ + When the post was last modified + """ + lastModified: AuditStamp! +} + +""" +Post content +""" +type PostContent { + """ + The type of post content + """ + contentType: PostContentType! + + """ + The title of the post + """ + title: String! + + """ + Optional content of the post + """ + description: String + + """ + Optional link that the post is associated with + """ + link: String + + """ + Optional media contained in the post + """ + media: Media +} + +""" +Media content +""" +type Media { + """ + The type of media + """ + type: MediaType! + + """ + The location of the media (a URL) + """ + location: String! 
+} diff --git a/datahub-graphql-core/src/main/resources/recommendation.graphql b/datahub-graphql-core/src/main/resources/recommendation.graphql index 5f1340ba452542..4e4bd14052aff9 100644 --- a/datahub-graphql-core/src/main/resources/recommendation.graphql +++ b/datahub-graphql-core/src/main/resources/recommendation.graphql @@ -217,7 +217,7 @@ type SearchParams { """ Entity types to be searched. If this is not provided, all entities will be searched. """ - types: [EntityType!] + types: [EntityType!] """ Search query @@ -237,12 +237,22 @@ type Filter { """ Name of field to filter by """ - field: String! + field: String! - """ - Value of the field to filter by - """ - value: String! + """ + Values, one of which the intended field should match. + """ + values: [String!]! + + """ + If the filter should or should not be matched + """ + negated: Boolean + + """ + Condition for the values. How to If unset, assumed to be equality + """ + condition: FilterOperator } """ @@ -269,4 +279,4 @@ type ContentParams { Number of entities corresponding to the recommended content """ count: Long -} \ No newline at end of file +} diff --git a/datahub-graphql-core/src/main/resources/search.graphql b/datahub-graphql-core/src/main/resources/search.graphql index cf409dc29a6e9f..b57de93da28a37 100644 --- a/datahub-graphql-core/src/main/resources/search.graphql +++ b/datahub-graphql-core/src/main/resources/search.graphql @@ -65,9 +65,15 @@ input SearchInput { count: Int """ - Facet filters to apply to search results + Deprecated in favor of the more expressive orFilters field + Facet filters to apply to search results. These will be 'AND'-ed together. """ - filters: [FacetFilterInput!] + filters: [FacetFilterInput!] @deprecated(reason: "Use `orFilters`- they are more expressive") + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters) + """ + orFilters: [OrFilter!] 
} """ @@ -95,9 +101,15 @@ input SearchAcrossEntitiesInput { count: Int """ - Faceted filters applied to search results + Deprecated in favor of the more expressive orFilters field + Facet filters to apply to search results. These will be 'AND'-ed together. """ - filters: [FacetFilterInput!] + filters: [FacetFilterInput!] @deprecated(reason: "Use `orFilters`- they are more expressive") + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters) + """ + orFilters: [OrFilter!] } """ @@ -135,9 +147,25 @@ input SearchAcrossLineageInput { count: Int """ - Faceted filters applied to search results + Deprecated in favor of the more expressive orFilters field + Facet filters to apply to search results. These will be 'AND'-ed together. """ - filters: [FacetFilterInput!] + filters: [FacetFilterInput!] @deprecated(reason: "Use `orFilters`- they are more expressive") + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters) + """ + orFilters: [OrFilter!] +} + +""" +A list of disjunctive criterion for the filter. (or operation to combine filters) +""" +input OrFilter { + """ + A list of and criteria the filter applies to the query + """ + and: [FacetFilterInput!] } """ @@ -150,14 +178,9 @@ input FacetFilterInput { field: String! """ - Value of the field to filter by (soon to be deprecated) + Values, one of which the intended field should match. """ - value: String! - - """ - Values of the field to filter by - """ - values: [String!] + values: [String!]! """ If the filter should or should not be matched """ negated: Boolean """ - Condition for the values. If unset, assumed to be equality + Condition for the values. If unset, assumed to be equality """ - condition: SearchCondition + condition: FilterOperator } -enum SearchCondition { +enum FilterOperator { """ Represent the relation: String field contains value, e.g. 
name contains Profile """ @@ -508,9 +531,15 @@ input BrowseInput { count: Int """ - Faceted filters applied to browse results + Deprecated in favor of the more expressive orFilters field + Facet filters to apply to search results. These will be 'AND'-ed together. """ - filters: [FacetFilterInput!] + filters: [FacetFilterInput!] @deprecated(reason: "Use `orFilters`- they are more expressive") + + """ + A list of disjunctive criterion for the filter. (or operation to combine filters) + """ + orFilters: [OrFilter!] } """ diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java index ef0cc566c575ec..e93f48336e8a68 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/TestUtils.java @@ -3,6 +3,9 @@ import com.datahub.authentication.Authentication; import com.datahub.authorization.AuthorizationResult; import com.datahub.authorization.Authorizer; +import com.linkedin.common.AuditStamp; +import com.linkedin.metadata.entity.EntityService; +import com.linkedin.mxe.MetadataChangeProposal; import org.mockito.Mockito; @@ -36,5 +39,27 @@ public static QueryContext getMockDenyContext() { return mockContext; } + public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations, MetadataChangeProposal proposal) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( + Mockito.eq(proposal), + Mockito.any(AuditStamp.class), + Mockito.eq(false) + ); + } + + public static void verifyIngestProposal(EntityService mockService, int numberOfInvocations) { + Mockito.verify(mockService, Mockito.times(numberOfInvocations)).ingestProposal( + Mockito.any(MetadataChangeProposal.class), + Mockito.any(AuditStamp.class), + Mockito.eq(false) + ); + } + + public static void verifyNoIngestProposal(EntityService mockService) { + 
Mockito.verify(mockService, Mockito.times(0)).ingestProposal( + Mockito.any(), + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); + } + private TestUtils() { } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java index 1ef3c101e6c2a4..8c23335b7e9d3c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/auth/ListAccessTokensResolverTest.java @@ -6,10 +6,10 @@ import com.linkedin.datahub.graphql.generated.FacetFilterInput; import com.linkedin.datahub.graphql.generated.ListAccessTokenInput; import com.linkedin.datahub.graphql.generated.ListAccessTokenResult; -import com.linkedin.datahub.graphql.generated.SearchCondition; import com.linkedin.entity.client.EntityClient; import com.linkedin.metadata.Constants; import graphql.schema.DataFetchingEnvironment; +import java.util.Collections; import junit.framework.TestCase; import org.mockito.Mockito; @@ -27,15 +27,18 @@ public void testGetSuccess() throws Exception { final ListAccessTokenInput input = new ListAccessTokenInput(); input.setStart(0); input.setCount(100); - final ImmutableList filters = ImmutableList.of(new FacetFilterInput("actor", - "urn:li:corpuser:test", ImmutableList.of("urn:li:corpuser:test"), false, SearchCondition.EQUAL)); + FacetFilterInput filter = new FacetFilterInput(); + filter.setField("actor"); + filter.setValues(ImmutableList.of("urn:li:corpuser:test")); + final ImmutableList filters = ImmutableList.of(filter); + input.setFilters(filters); Mockito.when(mockEnv.getArgument(Mockito.eq("input"))).thenReturn(input); final EntityClient mockClient = Mockito.mock(EntityClient.class); Mockito.when(mockClient.filter( Mockito.eq(Constants.ACCESS_TOKEN_ENTITY_NAME), 
- Mockito.eq(buildFilter(filters)), + Mockito.eq(buildFilter(filters, Collections.emptyList())), Mockito.notNull(), Mockito.eq(input.getStart()), Mockito.eq(input.getCount()), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java index 2fe927100d55cb..12cbf21b136663 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/delete/BatchUpdateSoftDeletedResolverTest.java @@ -66,10 +66,7 @@ public void testGetSuccessNoExistingStatus() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newStatus)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -78,10 +75,7 @@ public void testGetSuccessNoExistingStatus() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newStatus)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -124,10 +118,7 @@ public void testGetSuccessExistingStatus() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newStatus)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final 
MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -136,10 +127,7 @@ public void testGetSuccessExistingStatus() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newStatus)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -171,9 +159,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -191,9 +177,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -202,7 +186,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchUpdateSoftDeletedResolver resolver = new BatchUpdateSoftDeletedResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java index 49c24770333c73..36909eb075d9bb 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/deprecation/BatchUpdateDeprecationResolverTest.java @@ -73,10 +73,7 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newDeprecation)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -85,10 +82,7 @@ public void testGetSuccessNoExistingDeprecation() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newDeprecation)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -140,10 +134,7 @@ public void testGetSuccessExistingDeprecation() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newDeprecation)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -152,10 +143,7 @@ public void testGetSuccessExistingDeprecation() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newDeprecation)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - 
Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -188,9 +176,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -209,9 +195,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -220,7 +204,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchUpdateDeprecationResolver resolver = new BatchUpdateDeprecationResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java index 756e085593c26c..fe3bfb3dec66b2 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/BatchSetDomainResolverTest.java @@ -77,10 +77,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - 
Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -89,10 +86,7 @@ public void testGetSuccessNoExistingDomains() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) @@ -147,10 +141,7 @@ public void testGetSuccessExistingDomains() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -159,10 +150,7 @@ public void testGetSuccessExistingDomains() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_DOMAIN_2_URN)) @@ -215,10 +203,7 @@ public void testGetSuccessUnsetDomains() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), 
- Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -227,10 +212,7 @@ public void testGetSuccessUnsetDomains() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newDomains)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -258,9 +240,7 @@ public void testGetFailureDomainDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -294,9 +274,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -315,9 +293,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -326,7 +302,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), 
Mockito.anyBoolean()); BatchSetDomainResolver resolver = new BatchSetDomainResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java index 8fce428519c4b5..2cfab63cd4b076 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/domain/CreateDomainResolverTest.java @@ -9,6 +9,7 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.DomainKey; import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.r2.RemoteInvocationException; import graphql.schema.DataFetchingEnvironment; @@ -27,12 +28,16 @@ public class CreateDomainResolverTest { "test-name", "test-description" ); + private static final String TEST_ENTITY_URN = "urn:li:dataset:(urn:li:dataPlatform:mysql,my-test,PROD)"; + private static final String TEST_TAG_1_URN = "urn:li:tag:test-id-1"; + private static final String TEST_TAG_2_URN = "urn:li:tag:test-id-2"; @Test public void testGetSuccess() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CreateDomainResolver resolver = new CreateDomainResolver(mockClient); + EntityService mockService = Mockito.mock(EntityService.class); + CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver QueryContext mockContext = getMockAllowContext(); @@ -65,7 +70,8 @@ public void testGetSuccess() throws Exception { public void testGetUnauthorized() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); - CreateDomainResolver resolver = new 
CreateDomainResolver(mockClient); + EntityService mockService = Mockito.mock(EntityService.class); + CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); @@ -83,10 +89,11 @@ public void testGetUnauthorized() throws Exception { public void testGetEntityClientException() throws Exception { // Create resolver EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); Mockito.doThrow(RemoteInvocationException.class).when(mockClient).ingestProposal( Mockito.any(), Mockito.any(Authentication.class)); - CreateDomainResolver resolver = new CreateDomainResolver(mockClient); + CreateDomainResolver resolver = new CreateDomainResolver(mockClient, mockService); // Execute resolver DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java index 451faf9bc8e382..6bbf4f47975607 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/AddRelatedTermsResolverTest.java @@ -1,7 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.glossary; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.common.urn.UrnUtils; import com.linkedin.datahub.graphql.QueryContext; @@ -9,7 +8,6 @@ import com.linkedin.datahub.graphql.generated.TermRelationshipType; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; 
import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.ExecutionException; @@ -58,10 +56,7 @@ public void testGetSuccessIsRelatedNonExistent() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -93,10 +88,7 @@ public void testGetSuccessHasRelatedNonExistent() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -125,10 +117,7 @@ public void testGetFailAddSelfAsRelatedTerm() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } @Test @@ -148,10 +137,7 @@ public void testGetFailAddNonTermAsRelatedTerm() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } @Test @@ -172,10 +158,7 @@ public void testGetFailAddNonExistentTermAsRelatedTerm() throws Exception { 
Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } @Test @@ -196,10 +179,7 @@ public void testGetFailAddToNonExistentUrn() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } @Test @@ -220,10 +200,7 @@ public void testGetFailAddToNonTerm() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } @Test @@ -246,10 +223,7 @@ public void testFailNoPermissions() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java index 8d0cd0fd37b3c3..7ee810f3c04542 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryNodeResolverTest.java @@ -10,6 +10,7 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.GlossaryNodeKey; import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; @@ -74,10 +75,11 @@ private MetadataChangeProposal setupTest( @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); - CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient); + CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( @@ -89,10 +91,11 @@ public void testGetSuccess() throws Exception { @Test public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); - CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient); + CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( @@ -104,10 +107,11 @@ public void testGetSuccessNoDescription() throws Exception { @Test public void 
testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); - CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient); + CreateGlossaryNodeResolver resolver = new CreateGlossaryNodeResolver(mockClient, mockService); resolver.get(mockEnv).get(); Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java index f9aac997d2507c..a8b640898929ff 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/CreateGlossaryTermResolverTest.java @@ -10,6 +10,7 @@ import com.linkedin.metadata.Constants; import com.linkedin.metadata.key.GlossaryTermKey; import com.linkedin.metadata.utils.GenericRecordUtils; +import com.linkedin.metadata.entity.EntityService; import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; @@ -75,10 +76,11 @@ private MetadataChangeProposal setupTest( @Test public void testGetSuccess() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT, "test-description", parentNodeUrn); - CreateGlossaryTermResolver resolver = new 
CreateGlossaryTermResolver(mockClient); + CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( @@ -90,10 +92,11 @@ public void testGetSuccess() throws Exception { @Test public void testGetSuccessNoDescription() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_DESCRIPTION, "", parentNodeUrn); - CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient); + CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( @@ -105,10 +108,11 @@ public void testGetSuccessNoDescription() throws Exception { @Test public void testGetSuccessNoParentNode() throws Exception { EntityClient mockClient = Mockito.mock(EntityClient.class); + EntityService mockService = Mockito.mock(EntityService.class); DataFetchingEnvironment mockEnv = Mockito.mock(DataFetchingEnvironment.class); final MetadataChangeProposal proposal = setupTest(mockEnv, TEST_INPUT_NO_PARENT_NODE, "test-description", null); - CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient); + CreateGlossaryTermResolver resolver = new CreateGlossaryTermResolver(mockClient, mockService); resolver.get(mockEnv).get(); Mockito.verify(mockClient, Mockito.times(1)).ingestProposal( diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java index 6a704c2b61c127..dd54d7f9835c1d 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/RemoveRelatedTermsResolverTest.java @@ -1,7 +1,6 @@ package com.linkedin.datahub.graphql.resolvers.glossary; import com.google.common.collect.ImmutableList; -import com.linkedin.common.AuditStamp; import com.linkedin.common.GlossaryTermUrnArray; import com.linkedin.common.urn.GlossaryTermUrn; import com.linkedin.common.urn.Urn; @@ -12,7 +11,6 @@ import com.linkedin.glossary.GlossaryRelatedTerms; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import org.mockito.Mockito; import org.testng.annotations.Test; @@ -20,8 +18,7 @@ import java.util.Arrays; import java.util.concurrent.ExecutionException; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; -import static com.linkedin.datahub.graphql.TestUtils.getMockDenyContext; +import static com.linkedin.datahub.graphql.TestUtils.*; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -57,10 +54,7 @@ public void testGetSuccessIsA() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -92,10 +86,7 @@ public void testGetSuccessHasA() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - 
Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); @@ -123,10 +114,7 @@ public void testFailAspectDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); } @Test @@ -155,10 +143,7 @@ public void testFailNoPermissions() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(ExecutionException.class, () -> resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyNoIngestProposal(mockService); Mockito.verify(mockService, Mockito.times(0)).exists( Mockito.eq(Urn.createFromString(TEST_ENTITY_URN)) ); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java index e3edfe0efe1342..1c037ea04ef25c 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateNameResolverTest.java @@ -21,7 +21,7 @@ import java.util.concurrent.CompletionException; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static com.linkedin.datahub.graphql.TestUtils.*; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -71,10 +71,7 @@ public void testGetSuccess() throws Exception { final MetadataChangeProposal 
proposal = setupTests(mockEnv, mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any() - ); + verifyIngestProposal(mockService, 1, proposal); } @Test @@ -108,10 +105,7 @@ public void testGetSuccessForNode() throws Exception { UpdateNameResolver resolver = new UpdateNameResolver(mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any() - ); + verifyIngestProposal(mockService, 1, proposal); } @Test @@ -145,10 +139,7 @@ public void testGetSuccessForDomain() throws Exception { UpdateNameResolver resolver = new UpdateNameResolver(mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any() - ); + verifyIngestProposal(mockService, 1, proposal); } @Test @@ -162,8 +153,6 @@ public void testGetFailureEntityDoesNotExist() throws Exception { setupTests(mockEnv, mockService); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any()); + verifyNoIngestProposal(mockService); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java index 1cba0a86b97632..b9161996e8e354 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/glossary/UpdateParentNodeResolverTest.java @@ -20,7 +20,7 @@ import java.net.URISyntaxException; -import static com.linkedin.datahub.graphql.TestUtils.getMockAllowContext; +import static 
com.linkedin.datahub.graphql.TestUtils.*; import static org.testng.Assert.assertThrows; import static org.testng.Assert.assertTrue; @@ -72,10 +72,7 @@ public void testGetSuccess() throws Exception { final MetadataChangeProposal proposal = setupTests(mockEnv, mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any() - ); + verifyIngestProposal(mockService, 1, proposal); } @Test @@ -111,10 +108,7 @@ public void testGetSuccessForNode() throws Exception { UpdateParentNodeResolver resolver = new UpdateParentNodeResolver(mockService); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any() - ); + verifyIngestProposal(mockService, 1, proposal); } @Test @@ -129,9 +123,7 @@ public void testGetFailureEntityDoesNotExist() throws Exception { setupTests(mockEnv, mockService); assertThrows(IllegalArgumentException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any()); + verifyNoIngestProposal(mockService); } @Test @@ -146,9 +138,7 @@ public void testGetFailureNodeDoesNotExist() throws Exception { setupTests(mockEnv, mockService); assertThrows(IllegalArgumentException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any()); + verifyNoIngestProposal(mockService); } @Test @@ -163,8 +153,6 @@ public void testGetFailureParentIsNotNode() throws Exception { setupTests(mockEnv, mockService); assertThrows(URISyntaxException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any()); + verifyNoIngestProposal(mockService); } } diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java index 04ed7720333151..61dd6c678e6e0f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/MutableTypeBatchResolverTest.java @@ -144,7 +144,7 @@ public void testGetSuccess() throws Exception { List result = resolver.get(mockEnv).join(); ArgumentCaptor> changeProposalCaptor = ArgumentCaptor.forClass((Class) Collection.class); - Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any()); + Mockito.verify(mockClient, Mockito.times(1)).batchIngestProposals(changeProposalCaptor.capture(), Mockito.any(), Mockito.eq(false)); Mockito.verify(mockClient, Mockito.times(1)).batchGetV2( Mockito.eq(Constants.DATASET_ENTITY_NAME), Mockito.eq(ImmutableSet.of(datasetUrn1, datasetUrn2)), diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java index d21e8a8e31d565..605f1e4142e187 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/mutate/UpdateUserSettingResolverTest.java @@ -1,6 +1,5 @@ package com.linkedin.datahub.graphql.resolvers.mutate; -import com.linkedin.common.AuditStamp; import com.linkedin.common.urn.Urn; import com.linkedin.datahub.graphql.QueryContext; import com.linkedin.datahub.graphql.generated.UpdateUserSettingInput; @@ -47,9 +46,6 @@ public void testWriteCorpUserSettings() throws 
Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(newSettings)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal); } } diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java index 16a8e27b7559ab..d4bec4adb81a08 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/AddOwnersResolverTest.java @@ -12,7 +12,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.AddOwnersResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -56,10 +55,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) @@ -98,10 +94,7 @@ public void testGetSuccessExistingOwners() throws Exception { assertTrue(resolver.get(mockEnv).get()); // Unable to easily validate exact payload due to the injected timestamp - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + 
verifyIngestProposal(mockService, 1); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_OWNER_1_URN)) @@ -136,9 +129,7 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -165,9 +156,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -185,9 +174,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -196,7 +183,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); AddOwnersResolver resolver = new AddOwnersResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java index 43121fa592fc92..3a846c8f27c715 100644 --- 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchAddOwnersResolverTest.java @@ -74,10 +74,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(), // Ownership has a dynamically generated timestamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) @@ -133,10 +130,7 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(), // Ownership has a dynamically generated timestamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_OWNER_URN_1)) @@ -180,9 +174,7 @@ public void testGetFailureOwnerDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -224,9 +216,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test 
@@ -253,9 +243,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -264,7 +252,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchAddOwnersResolver resolver = new BatchAddOwnersResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java index ac4e0a7cdbef63..6dad703929e0cf 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/owner/BatchRemoveOwnersResolverTest.java @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveOwnersResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -67,10 +66,7 @@ public void testGetSuccessNoExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(), // Ownership has a dynamically generated timestamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); } 
@Test @@ -116,10 +112,7 @@ public void testGetSuccessExistingOwners() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); } @Test @@ -154,9 +147,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -176,9 +167,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -187,7 +176,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchRemoveOwnersResolver resolver = new BatchRemoveOwnersResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java new file mode 100644 index 00000000000000..b56d897a468ba8 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/CreatePostResolverTest.java @@ -0,0 +1,91 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import 
com.datahub.authentication.Authentication; +import com.datahub.authentication.post.PostService; +import com.linkedin.common.Media; +import com.linkedin.common.url.Url; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.CreatePostInput; +import com.linkedin.datahub.graphql.generated.MediaType; +import com.linkedin.datahub.graphql.generated.PostContentType; +import com.linkedin.datahub.graphql.generated.PostType; +import com.linkedin.datahub.graphql.generated.UpdateMediaInput; +import com.linkedin.datahub.graphql.generated.UpdatePostContentInput; +import graphql.schema.DataFetchingEnvironment; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + + +public class CreatePostResolverTest { + private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; + private static final String POST_MEDIA_LOCATION = + "https://datahubproject.io/img/datahub-logo-color-light-horizontal.svg"; + private static final PostContentType POST_CONTENT_TYPE = PostContentType.LINK; + private static final String POST_TITLE = "title"; + private static final String POST_DESCRIPTION = "description"; + private static final String POST_LINK = "https://datahubproject.io"; + private PostService _postService; + private CreatePostResolver _resolver; + private DataFetchingEnvironment _dataFetchingEnvironment; + private Authentication _authentication; + + @BeforeMethod + public void setupTest() throws Exception { + _postService = mock(PostService.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _authentication = mock(Authentication.class); + + _resolver = new CreatePostResolver(_postService); + } + + @Test + public void testNotAuthorizedFails() { + QueryContext mockContext = getMockDenyContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + 
assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); + } + + @Test + public void testCreatePost() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + when(mockContext.getAuthentication()).thenReturn(_authentication); + + UpdateMediaInput media = new UpdateMediaInput(); + media.setType(POST_MEDIA_TYPE); + media.setLocation(POST_MEDIA_LOCATION); + Media mediaObj = new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION)); + when(_postService.mapMedia(eq(POST_MEDIA_TYPE.toString()), eq(POST_MEDIA_LOCATION))).thenReturn(mediaObj); + + UpdatePostContentInput content = new UpdatePostContentInput(); + content.setTitle(POST_TITLE); + content.setDescription(POST_DESCRIPTION); + content.setLink(POST_LINK); + content.setContentType(POST_CONTENT_TYPE); + content.setMedia(media); + com.linkedin.post.PostContent postContentObj = new com.linkedin.post.PostContent().setType( + com.linkedin.post.PostContentType.valueOf(POST_CONTENT_TYPE.toString())) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(new Media().setType(com.linkedin.common.MediaType.valueOf(POST_MEDIA_TYPE.toString())) + .setLocation(new Url(POST_MEDIA_LOCATION))); + when(_postService.mapPostContent(eq(POST_CONTENT_TYPE.toString()), eq(POST_TITLE), eq(POST_DESCRIPTION), + eq(POST_LINK), any(Media.class))).thenReturn(postContentObj); + + CreatePostInput input = new CreatePostInput(); + input.setPostType(PostType.HOME_PAGE_ANNOUNCEMENT); + input.setContent(content); + when(_dataFetchingEnvironment.getArgument(eq("input"))).thenReturn(input); + when(_postService.createPost(eq(PostType.HOME_PAGE_ANNOUNCEMENT.toString()), eq(postContentObj), + eq(_authentication))).thenReturn(true); + + assertTrue(_resolver.get(_dataFetchingEnvironment).join()); + } +} diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java new file mode 100644 index 00000000000000..b4bec3ae9b3051 --- /dev/null +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/post/ListPostsResolverTest.java @@ -0,0 +1,120 @@ +package com.linkedin.datahub.graphql.resolvers.post; + +import com.datahub.authentication.Authentication; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.linkedin.common.Media; +import com.linkedin.common.MediaType; +import com.linkedin.common.url.Url; +import com.linkedin.common.urn.Urn; +import com.linkedin.datahub.graphql.QueryContext; +import com.linkedin.datahub.graphql.generated.ListPostsInput; +import com.linkedin.datahub.graphql.generated.ListPostsResult; +import com.linkedin.entity.Aspect; +import com.linkedin.entity.EntityResponse; +import com.linkedin.entity.EnvelopedAspect; +import com.linkedin.entity.EnvelopedAspectMap; +import com.linkedin.entity.client.EntityClient; +import com.linkedin.metadata.search.SearchEntity; +import com.linkedin.metadata.search.SearchEntityArray; +import com.linkedin.metadata.search.SearchResult; +import com.linkedin.metadata.search.SearchResultMetadata; +import com.linkedin.policy.DataHubRoleInfo; +import com.linkedin.post.PostContent; +import com.linkedin.post.PostContentType; +import com.linkedin.post.PostInfo; +import com.linkedin.post.PostType; +import graphql.schema.DataFetchingEnvironment; +import java.net.URISyntaxException; +import java.util.Map; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import static com.linkedin.datahub.graphql.TestUtils.*; +import static com.linkedin.metadata.Constants.*; +import static org.mockito.Mockito.*; +import static org.testng.Assert.*; + + +public class ListPostsResolverTest 
{ + private static Map _entityResponseMap; + private static final String POST_URN_STRING = "urn:li:post:examplePost"; + private static final MediaType POST_MEDIA_TYPE = MediaType.IMAGE; + private static final String POST_MEDIA_LOCATION = + "https://datahubproject.io/img/datahub-logo-color-light-horizontal.svg"; + private static final PostContentType POST_CONTENT_TYPE = PostContentType.LINK; + private static final String POST_TITLE = "title"; + private static final String POST_DESCRIPTION = "description"; + private static final String POST_LINK = "https://datahubproject.io"; + private static final Media MEDIA = new Media().setType(POST_MEDIA_TYPE).setLocation(new Url(POST_MEDIA_LOCATION)); + private static final PostContent POST_CONTENT = new PostContent().setType(POST_CONTENT_TYPE) + .setTitle(POST_TITLE) + .setDescription(POST_DESCRIPTION) + .setLink(new Url(POST_LINK)) + .setMedia(MEDIA); + private static final PostType POST_TYPE = PostType.HOME_PAGE_ANNOUNCEMENT; + + private EntityClient _entityClient; + private ListPostsResolver _resolver; + private DataFetchingEnvironment _dataFetchingEnvironment; + private Authentication _authentication; + + private Map getMockPostsEntityResponse() throws URISyntaxException { + Urn postUrn = Urn.createFromString(POST_URN_STRING); + + EntityResponse entityResponse = new EntityResponse().setUrn(postUrn); + PostInfo postInfo = new PostInfo(); + postInfo.setType(POST_TYPE); + postInfo.setContent(POST_CONTENT); + DataHubRoleInfo dataHubRoleInfo = new DataHubRoleInfo(); + dataHubRoleInfo.setDescription(postUrn.toString()); + dataHubRoleInfo.setName(postUrn.toString()); + entityResponse.setAspects(new EnvelopedAspectMap(ImmutableMap.of(DATAHUB_ROLE_INFO_ASPECT_NAME, + new EnvelopedAspect().setValue(new Aspect(dataHubRoleInfo.data()))))); + + return ImmutableMap.of(postUrn, entityResponse); + } + + @BeforeMethod + public void setupTest() throws Exception { + _entityResponseMap = getMockPostsEntityResponse(); + + _entityClient = 
mock(EntityClient.class); + _dataFetchingEnvironment = mock(DataFetchingEnvironment.class); + _authentication = mock(Authentication.class); + + _resolver = new ListPostsResolver(_entityClient); + } + + @Test + public void testNotAuthorizedFails() { + QueryContext mockContext = getMockDenyContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + + assertThrows(() -> _resolver.get(_dataFetchingEnvironment).join()); + } + + @Test + public void testListPosts() throws Exception { + QueryContext mockContext = getMockAllowContext(); + when(_dataFetchingEnvironment.getContext()).thenReturn(mockContext); + when(mockContext.getAuthentication()).thenReturn(_authentication); + + ListPostsInput input = new ListPostsInput(); + when(_dataFetchingEnvironment.getArgument("input")).thenReturn(input); + final SearchResult roleSearchResult = + new SearchResult().setMetadata(new SearchResultMetadata()).setFrom(0).setPageSize(10).setNumEntities(1); + roleSearchResult.setEntities( + new SearchEntityArray(ImmutableList.of(new SearchEntity().setEntity(Urn.createFromString(POST_URN_STRING))))); + + when(_entityClient.search(eq(POST_ENTITY_NAME), any(), eq(null), any(), anyInt(), anyInt(), + eq(_authentication))).thenReturn(roleSearchResult); + when(_entityClient.batchGetV2(eq(POST_ENTITY_NAME), any(), any(), any())).thenReturn(_entityResponseMap); + + ListPostsResult result = _resolver.get(_dataFetchingEnvironment).join(); + assertEquals(result.getStart(), 0); + assertEquals(result.getCount(), 10); + assertEquals(result.getTotal(), 1); + assertEquals(result.getPosts().size(), 1); + } +} diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java index 1b1ead881574d5..e0769668be0232 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java +++ 
b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/AddTagsResolverTest.java @@ -70,10 +70,7 @@ public void testGetSuccessNoExistingTags() throws Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(newTags)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) @@ -127,10 +124,7 @@ public void testGetSuccessExistingTags() throws Exception { proposal.setAspect(GenericRecordUtils.serializeAspect(newTags)); proposal.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) @@ -166,9 +160,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -196,9 +188,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -217,9 +207,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> 
resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -228,7 +216,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.eq(false)); AddTagsResolver resolver = new AddTagsResolver(Mockito.mock(EntityService.class)); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java index 0eb361138002d3..4991f1b59a0f9f 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchAddTagsResolverTest.java @@ -83,10 +83,7 @@ public void testGetSuccessNoExistingTags() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newTags)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -95,10 +92,7 @@ public void testGetSuccessNoExistingTags() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newTags)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); Mockito.verify(mockService, Mockito.times(1)).exists( 
Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) @@ -162,10 +156,7 @@ public void testGetSuccessExistingTags() throws Exception { proposal1.setAspect(GenericRecordUtils.serializeAspect(newTags)); proposal1.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal1); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); proposal2.setEntityUrn(Urn.createFromString(TEST_ENTITY_URN_2)); @@ -174,10 +165,7 @@ public void testGetSuccessExistingTags() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(newTags)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_TAG_1_URN)) @@ -217,7 +205,7 @@ public void testGetFailureTagDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -256,7 +244,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -280,7 +268,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), 
Mockito.anyBoolean()); } @Test @@ -289,7 +277,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchAddTagsResolver resolver = new BatchAddTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java index 124927ff0ae7bc..7d3d87636142a4 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/tag/BatchRemoveTagsResolverTest.java @@ -81,7 +81,7 @@ public void testGetSuccessNoExistingTags() throws Exception { Mockito.verify(mockService, Mockito.times(1)).ingestProposal( Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) + Mockito.any(AuditStamp.class), Mockito.eq(false) ); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); @@ -91,10 +91,7 @@ public void testGetSuccessNoExistingTags() throws Exception { proposal2.setAspect(GenericRecordUtils.serializeAspect(emptyTags)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -154,7 +151,7 @@ public void testGetSuccessExistingTags() throws Exception { Mockito.verify(mockService, Mockito.times(1)).ingestProposal( Mockito.eq(proposal1), - Mockito.any(AuditStamp.class) + Mockito.any(AuditStamp.class), Mockito.eq(false) ); final MetadataChangeProposal proposal2 = new MetadataChangeProposal(); @@ -164,10 +161,7 @@ public void testGetSuccessExistingTags() throws Exception { 
proposal2.setAspect(GenericRecordUtils.serializeAspect(emptyTags)); proposal2.setChangeType(ChangeType.UPSERT); - Mockito.verify(mockService, Mockito.times(1)).ingestProposal( - Mockito.eq(proposal2), - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 1, proposal2); } @Test @@ -206,7 +200,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -230,7 +224,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -239,7 +233,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchRemoveTagsResolver resolver = new BatchRemoveTagsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java index 2ac8842d9590e1..c9ec92001f89bc 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/AddTermsResolverTest.java @@ -59,7 +59,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, 
Mockito.times(1)).ingestProposal( Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) + Mockito.any(AuditStamp.class), Mockito.eq(false) ); Mockito.verify(mockService, Mockito.times(1)).exists( @@ -105,7 +105,7 @@ public void testGetSuccessExistingTerms() throws Exception { // Unable to easily validate exact payload due to the injected timestamp Mockito.verify(mockService, Mockito.times(1)).ingestProposal( Mockito.any(MetadataChangeProposal.class), - Mockito.any(AuditStamp.class) + Mockito.any(AuditStamp.class), Mockito.eq(false) ); Mockito.verify(mockService, Mockito.times(1)).exists( @@ -144,7 +144,7 @@ public void testGetFailureTermDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -174,7 +174,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -195,7 +195,7 @@ public void testGetUnauthorized() throws Exception { assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); Mockito.verify(mockService, Mockito.times(0)).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); } @Test @@ -204,7 +204,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); AddTermsResolver resolver = new AddTermsResolver(Mockito.mock(EntityService.class)); diff --git 
a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java index 78655daf13776d..dfe1394635c4ed 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchAddTermsResolverTest.java @@ -14,7 +14,6 @@ import com.linkedin.datahub.graphql.resolvers.mutate.BatchAddTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -67,10 +66,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), // glossary terms contains a dynamically generated audit stamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) @@ -122,10 +118,7 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), // glossary terms contains a dynamically generated audit stamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); Mockito.verify(mockService, Mockito.times(1)).exists( Mockito.eq(Urn.createFromString(TEST_GLOSSARY_TERM_1_URN)) @@ -162,9 +155,7 @@ public void 
testGetFailureTagDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -200,9 +191,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -223,9 +212,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -234,7 +221,7 @@ public void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchAddTermsResolver resolver = new BatchAddTermsResolver(mockService); diff --git a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java index cc5d825ac5ee56..dcc8659c1baf3b 100644 --- a/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java +++ b/datahub-graphql-core/src/test/java/com/linkedin/datahub/graphql/resolvers/term/BatchRemoveTermsResolverTest.java @@ -14,7 +14,6 @@ import 
com.linkedin.datahub.graphql.resolvers.mutate.BatchRemoveTermsResolver; import com.linkedin.metadata.Constants; import com.linkedin.metadata.entity.EntityService; -import com.linkedin.mxe.MetadataChangeProposal; import graphql.schema.DataFetchingEnvironment; import java.util.concurrent.CompletionException; import org.mockito.Mockito; @@ -67,10 +66,7 @@ public void testGetSuccessNoExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), // Glossary terms contains dynamically generated audit stamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); } @Test @@ -119,10 +115,7 @@ public void testGetSuccessExistingTerms() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertTrue(resolver.get(mockEnv).get()); - Mockito.verify(mockService, Mockito.times(2)).ingestProposal( - Mockito.any(MetadataChangeProposal.class), // Glossary terms contains dynamically generated audit stamp - Mockito.any(AuditStamp.class) - ); + verifyIngestProposal(mockService, 2); } @Test @@ -159,9 +152,7 @@ public void testGetFailureResourceDoesNotExist() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -183,9 +174,7 @@ public void testGetUnauthorized() throws Exception { Mockito.when(mockEnv.getContext()).thenReturn(mockContext); assertThrows(CompletionException.class, () -> resolver.get(mockEnv).join()); - Mockito.verify(mockService, Mockito.times(0)).ingestProposal( - Mockito.any(), - Mockito.any(AuditStamp.class)); + verifyNoIngestProposal(mockService); } @Test @@ -194,7 +183,7 @@ public 
void testGetEntityClientException() throws Exception { Mockito.doThrow(RuntimeException.class).when(mockService).ingestProposal( Mockito.any(), - Mockito.any(AuditStamp.class)); + Mockito.any(AuditStamp.class), Mockito.anyBoolean()); BatchRemoveTermsResolver resolver = new BatchRemoveTermsResolver(mockService); diff --git a/datahub-web-react/README.md b/datahub-web-react/README.md index 3cc0f1fc83eacc..891735a7c55e98 100644 --- a/datahub-web-react/README.md +++ b/datahub-web-react/README.md @@ -74,7 +74,10 @@ The best workaround is to revert to the Active LTS version of Node, 16.13.0 with #### Customizing your App without rebuilding assets To see the results of any change to a theme, you will need to rebuild your datahub-frontend-react container. While this may work for some users, if you don't want to rebuild your container -you can still customize the homepage's logo without rebuilding. You can do this by setting the REACT_APP_LOGO_URL env variable when deploying GMS. +you can change two things without rebuilding. + +1. You customize the logo on the homepage & the search bar header by setting the `REACT_APP_LOGO_URL` env variable when deploying GMS. +2. You can customize the favicon (the icon on your browser tab) by setting the `REACT_APP_FAVICON_URL` env var when deploying GMS. 
#### Selecting a theme diff --git a/datahub-web-react/public/meta-favicon.ico b/datahub-web-react/public/meta-favicon.ico new file mode 100644 index 00000000000000..1587e379073865 Binary files /dev/null and b/datahub-web-react/public/meta-favicon.ico differ diff --git a/datahub-web-react/src/Mocks.tsx b/datahub-web-react/src/Mocks.tsx index 35a15521a6952c..78a61635f24a95 100644 --- a/datahub-web-react/src/Mocks.tsx +++ b/datahub-web-react/src/Mocks.tsx @@ -27,6 +27,7 @@ import { RelationshipDirection, Container, PlatformPrivileges, + FilterOperator, } from './types.generated'; import { GetTagDocument } from './graphql/tag.generated'; import { GetMlModelDocument } from './graphql/mlModel.generated'; @@ -1701,7 +1702,8 @@ export const mocks = [ path: [], start: 0, count: 20, - filters: null, + filters: [], + orFilters: [], }, }, }, @@ -1735,7 +1737,8 @@ export const mocks = [ path: ['prod', 'hdfs'], start: 0, count: 20, - filters: null, + filters: [], + orFilters: [], }, }, }, @@ -1769,7 +1772,8 @@ export const mocks = [ path: ['prod'], start: 0, count: 20, - filters: null, + filters: [], + orFilters: [], }, }, }, @@ -1873,6 +1877,7 @@ export const mocks = [ start: 0, count: 10, filters: [], + orFilters: [], }, }, }, @@ -1946,10 +1951,17 @@ export const mocks = [ query: 'test', start: 0, count: 10, - filters: [ + filters: [], + orFilters: [ { - field: 'platform', - value: 'kafka', + and: [ + { + field: 'platform', + values: ['kafka'], + negated: false, + condition: FilterOperator.Equal, + }, + ], }, ], }, @@ -2019,6 +2031,7 @@ export const mocks = [ start: 0, count: 1, filters: [], + orFilters: [], }, }, }, @@ -2109,14 +2122,17 @@ export const mocks = [ query: 'test', start: 0, count: 10, - filters: [ - { - field: 'platform', - value: 'kafka', - }, + filters: [], + orFilters: [ { - field: 'platform', - value: 'hdfs', + and: [ + { + field: 'platform', + values: ['kafka', 'hdfs'], + negated: false, + condition: FilterOperator.Equal, + }, + ], }, ], }, @@ -2256,6 
+2272,7 @@ export const mocks = [ start: 0, count: 1, filters: [], + orFilters: [], }, }, }, @@ -2283,6 +2300,7 @@ export const mocks = [ start: 0, count: 1, filters: [], + orFilters: [], }, }, }, @@ -2348,6 +2366,7 @@ export const mocks = [ start: 0, count: 20, filters: [], + orFilters: [], }, }, }, @@ -2421,6 +2440,7 @@ export const mocks = [ start: 0, count: 10, filters: [], + orFilters: [], }, }, }, @@ -2564,6 +2584,7 @@ export const mocks = [ start: 0, count: 10, filters: [], + orFilters: [], }, }, }, @@ -2637,10 +2658,17 @@ export const mocks = [ query: 'test', start: 0, count: 10, - filters: [ + filters: [], + orFilters: [ { - field: 'platform', - value: 'kafka', + and: [ + { + field: 'platform', + values: ['kafka'], + negated: false, + condition: FilterOperator.Equal, + }, + ], }, ], }, @@ -2738,10 +2766,17 @@ export const mocks = [ query: 'test', start: 0, count: 10, - filters: [ + filters: [], + orFilters: [ { - field: 'platform', - value: 'kafka', + and: [ + { + field: 'platform', + values: ['kafka'], + negated: false, + condition: FilterOperator.Equal, + }, + ], }, ], }, @@ -2780,6 +2815,7 @@ export const mocks = [ start: 0, count: 10, filters: [], + orFilters: [], }, }, }, @@ -2841,6 +2877,7 @@ export const mocks = [ start: 0, count: 1, filters: [], + orFilters: [], }, }, }, @@ -2906,6 +2943,7 @@ export const mocks = [ start: 0, count: 20, filters: [], + orFilters: [], }, }, }, @@ -2978,14 +3016,17 @@ export const mocks = [ query: 'test', start: 0, count: 10, - filters: [ - { - field: 'platform', - value: 'kafka', - }, + filters: [], + orFilters: [ { - field: 'platform', - value: 'hdfs', + and: [ + { + field: 'platform', + values: ['kafka', 'hdfs'], + negated: false, + condition: FilterOperator.Equal, + }, + ], }, ], }, @@ -3052,14 +3093,17 @@ export const mocks = [ query: 'test', start: 0, count: 10, - filters: [ - { - field: 'platform', - value: 'kafka', - }, + filters: [], + orFilters: [ { - field: 'platform', - value: 'hdfs', + and: [ + { + field: 
'platform', + values: ['kafka', 'hdfs'], + negated: false, + condition: FilterOperator.Equal, + }, + ], }, ], }, @@ -3251,6 +3295,7 @@ export const mocks = [ start: 0, count: 10, filters: [], + orFilters: [], }, }, }, @@ -3307,6 +3352,7 @@ export const mocks = [ start: 0, count: 6, filters: [], + orFilters: [], }, }, }, diff --git a/datahub-web-react/src/app/entity/EntityRegistry.tsx b/datahub-web-react/src/app/entity/EntityRegistry.tsx index 6aa24d9cf2196e..535902267772cb 100644 --- a/datahub-web-react/src/app/entity/EntityRegistry.tsx +++ b/datahub-web-react/src/app/entity/EntityRegistry.tsx @@ -140,6 +140,8 @@ export default class EntityRegistry { numUpstreamChildren: genericEntityProperties?.upstream?.total, status: genericEntityProperties?.status, siblingPlatforms: genericEntityProperties?.siblingPlatforms, + fineGrainedLineages: genericEntityProperties?.fineGrainedLineages, + schemaMetadata: genericEntityProperties?.schemaMetadata, } as FetchedEntity) || undefined ); } diff --git a/datahub-web-react/src/app/entity/container/ContainerEntitiesTab.tsx b/datahub-web-react/src/app/entity/container/ContainerEntitiesTab.tsx index b4fd67806edfe1..7aaa5b2c6ee81e 100644 --- a/datahub-web-react/src/app/entity/container/ContainerEntitiesTab.tsx +++ b/datahub-web-react/src/app/entity/container/ContainerEntitiesTab.tsx @@ -7,7 +7,7 @@ export const ContainerEntitiesTab = () => { const fixedFilter = { field: 'container', - value: urn, + values: [urn], }; return ( diff --git a/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx b/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx index 7e5ed2b5d482d2..02d73982550e87 100644 --- a/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx +++ b/datahub-web-react/src/app/entity/dataJob/DataJobEntity.tsx @@ -17,6 +17,7 @@ import { getDataForEntityType } from '../shared/containers/profile/utils'; import { SidebarDomainSection } from '../shared/containers/profile/sidebar/Domain/SidebarDomainSection'; import { RunsTab 
} from './tabs/RunsTab'; import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; +import { DataFlowEntity } from '../dataFlow/DataFlowEntity'; const getDataJobPlatformName = (data?: DataJob): string => { return data?.dataFlow?.platform?.properties?.displayName || data?.dataFlow?.platform?.name || ''; @@ -174,10 +175,29 @@ export class DataJobEntity implements Entity { ); }; + getExpandedNameForDataJob = (entity: DataJob): string => { + const name = this.displayName(entity); + const flowName = entity?.dataFlow ? new DataFlowEntity().displayName(entity?.dataFlow) : undefined; + + // if we have no name, just return blank. this should not happen, so dont try & construct a name + if (!name) { + return ''; + } + + // if we have a flow name, return the full name of flow.task + if (flowName) { + return `${flowName}.${name}`; + } + + // otherwise, just return the task name (same as non-expanded) + return name; + }; + getLineageVizConfig = (entity: DataJob) => { return { urn: entity?.urn, - name: entity?.properties?.name || '', + name: this.displayName(entity), + expandedName: this.getExpandedNameForDataJob(entity), type: EntityType.DataJob, icon: entity?.dataFlow?.platform?.properties?.logoUrl || '', platform: entity?.dataFlow?.platform, diff --git a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx index 0953d40605e225..eb6724acb2c8bd 100644 --- a/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx +++ b/datahub-web-react/src/app/entity/dataset/DatasetEntity.tsx @@ -1,10 +1,8 @@ import * as React from 'react'; import { DatabaseFilled, DatabaseOutlined } from '@ant-design/icons'; -import { Typography } from 'antd'; import { Dataset, DatasetProperties, EntityType, OwnershipType, SearchResult } from '../../../types.generated'; import { Entity, EntityCapabilityType, IconStyleType, PreviewType } from '../Entity'; import { Preview } from './preview/Preview'; -import { 
FIELDS_TO_HIGHLIGHT } from './search/highlights'; import { EntityProfile } from '../shared/containers/profile/EntityProfile'; import { GetDatasetQuery, useGetDatasetQuery, useUpdateDatasetMutation } from '../../../graphql/dataset.generated'; import { GenericEntityProperties } from '../shared/types'; @@ -28,8 +26,7 @@ import { OperationsTab } from './profile/OperationsTab'; import { EntityMenuItems } from '../shared/EntityDropdown/EntityDropdown'; import { SidebarSiblingsSection } from '../shared/containers/profile/sidebar/SidebarSiblingsSection'; import { DatasetStatsSummarySubHeader } from './profile/stats/stats/DatasetStatsSummarySubHeader'; -import { TagSummary } from './shared/TagSummary'; -import { TermSummary } from './shared/TermSummary'; +import { DatasetSearchSnippet } from './DatasetSearchSnippet'; const SUBTYPES = { VIEW: 'view', @@ -256,18 +253,6 @@ export class DatasetEntity implements Entity { const data = result.entity as Dataset; const genericProperties = this.getGenericEntityProperties(data); - let snippet: React.ReactNode; - - if (result.matchedFields.length > 0) { - if (result.matchedFields[0].value.includes('urn:li:tag')) { - snippet = ; - } else if (result.matchedFields[0].value.includes('urn:li:glossaryTerm')) { - snippet = ; - } else { - snippet = {result.matchedFields[0].value}; - } - } - return ( { subtype={data.subTypes?.typeNames?.[0]} container={data.container} parentContainers={data.parentContainers} - snippet={ - // Add match highlights only if all the matched fields are in the FIELDS_TO_HIGHLIGHT - result.matchedFields.length > 0 && - result.matchedFields.every((field) => FIELDS_TO_HIGHLIGHT.has(field.name)) && ( - - Matches {FIELDS_TO_HIGHLIGHT.get(result.matchedFields[0].name)} {snippet} - - ) - } + snippet={} insights={result.insights} externalUrl={data.properties?.externalUrl} statsSummary={data.statsSummary} diff --git a/datahub-web-react/src/app/entity/dataset/DatasetSearchSnippet.tsx 
b/datahub-web-react/src/app/entity/dataset/DatasetSearchSnippet.tsx new file mode 100644 index 00000000000000..16da7ba8b06fe5 --- /dev/null +++ b/datahub-web-react/src/app/entity/dataset/DatasetSearchSnippet.tsx @@ -0,0 +1,36 @@ +import React from 'react'; + +import { Typography } from 'antd'; +import { MatchedField } from '../../../types.generated'; +import { TagSummary } from './shared/TagSummary'; +import { TermSummary } from './shared/TermSummary'; +import { FIELDS_TO_HIGHLIGHT } from './search/highlights'; +import { getMatchPrioritizingPrimary } from '../shared/utils'; + +type Props = { + matchedFields: MatchedField[]; +}; + +const LABEL_INDEX_NAME = 'fieldLabels'; + +export const DatasetSearchSnippet = ({ matchedFields }: Props) => { + const matchedField = getMatchPrioritizingPrimary(matchedFields, LABEL_INDEX_NAME); + + let snippet: React.ReactNode; + + if (matchedField) { + if (matchedField.value.includes('urn:li:tag')) { + snippet = ; + } else if (matchedField.value.includes('urn:li:glossaryTerm')) { + snippet = ; + } else { + snippet = {matchedField.value}; + } + } + + return matchedField ? 
( + + Matches {FIELDS_TO_HIGHLIGHT.get(matchedField.name)} {snippet}{' '} + + ) : null; +}; diff --git a/datahub-web-react/src/app/entity/domain/DomainEntitiesTab.tsx b/datahub-web-react/src/app/entity/domain/DomainEntitiesTab.tsx index a5db4ecac75e24..8fe2b9c1178754 100644 --- a/datahub-web-react/src/app/entity/domain/DomainEntitiesTab.tsx +++ b/datahub-web-react/src/app/entity/domain/DomainEntitiesTab.tsx @@ -11,7 +11,7 @@ export const DomainEntitiesTab = () => { if (entityType === EntityType.Domain) { fixedFilter = { field: 'domains', - value: urn, + values: [urn], }; } diff --git a/datahub-web-react/src/app/entity/group/GroupAssets.tsx b/datahub-web-react/src/app/entity/group/GroupAssets.tsx index 3dd1e9f97e4412..0417dc1b13b78e 100644 --- a/datahub-web-react/src/app/entity/group/GroupAssets.tsx +++ b/datahub-web-react/src/app/entity/group/GroupAssets.tsx @@ -14,7 +14,7 @@ export const GroupAssets = ({ urn }: Props) => { return ( diff --git a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx index a8aec44c73f443..f128c7b513a848 100644 --- a/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx +++ b/datahub-web-react/src/app/entity/group/GroupInfoSideBar.tsx @@ -1,10 +1,11 @@ import { Divider, message, Space, Button, Typography, Row, Col, Tooltip } from 'antd'; -import React, { useState } from 'react'; +import React, { useEffect, useState } from 'react'; import styled from 'styled-components'; import { EditOutlined, LockOutlined, MailOutlined, SlackOutlined } from '@ant-design/icons'; import { useHistory, useRouteMatch } from 'react-router-dom'; import { useUpdateCorpGroupPropertiesMutation } from '../../../graphql/group.generated'; import { EntityRelationshipsResult, Ownership } from '../../../types.generated'; +import { useUpdateNameMutation } from '../../../graphql/mutations.generated'; import GroupEditModal from './GroupEditModal'; import CustomAvatar from 
'../../shared/avatar/CustomAvatar'; @@ -20,6 +21,7 @@ import { GroupsSection, } from '../shared/SidebarStyledComponents'; import GroupMembersSideBarSection from './GroupMembersSideBarSection'; +import { useGetAuthenticatedUser } from '../../useGetAuthenticatedUser'; const { Paragraph } = Typography; @@ -34,7 +36,7 @@ type SideBarData = { groupOwnerShip: Ownership; isExternalGroup: boolean; externalGroupType: string | undefined; - urn: string | undefined; + urn: string; }; type Props = { @@ -61,10 +63,21 @@ const GroupNameHeader = styled(Row)` min-height: 100px; `; -const GroupName = styled.div` +const GroupTitle = styled(Typography.Title)` max-width: 260px; word-wrap: break-word; width: 140px; + + &&& { + margin-bottom: 0; + word-break: break-all; + margin-left: 10px; + } + + .ant-typography-edit { + font-size: 16px; + margin-left: 10px; + } `; /** @@ -90,7 +103,31 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { /* eslint-disable @typescript-eslint/no-unused-vars */ const [editGroupModal, showEditGroupModal] = useState(false); - const canEditGroup = true; // TODO; Replace this will fine-grained understanding of user permissions. 
+ const me = useGetAuthenticatedUser(); + const canEditGroup = me?.platformPrivileges.manageIdentities; + const [groupTitle, setGroupTitle] = useState(name); + const [updateName] = useUpdateNameMutation(); + + useEffect(() => { + setGroupTitle(groupTitle); + }, [groupTitle]); + + // Update Group Title + // eslint-disable-next-line @typescript-eslint/no-shadow + const handleTitleUpdate = async (name: string) => { + setGroupTitle(name); + await updateName({ variables: { input: { name, urn } } }) + .then(() => { + message.success({ content: 'Name Updated', duration: 2 }); + refetch(); + }) + .catch((e: unknown) => { + message.destroy(); + if (e instanceof Error) { + message.error({ content: `Failed to update name: \n ${e.message || ''}`, duration: 3 }); + } + }); + }; const getEditModalData = { urn, @@ -135,7 +172,9 @@ export default function GroupInfoSidebar({ sideBarData, refetch }: Props) { /> - {name} + + {groupTitle} + {isExternalGroup && ( diff --git a/datahub-web-react/src/app/entity/shared/__tests__/utils.test.ts b/datahub-web-react/src/app/entity/shared/__tests__/utils.test.ts new file mode 100644 index 00000000000000..86dec46528b494 --- /dev/null +++ b/datahub-web-react/src/app/entity/shared/__tests__/utils.test.ts @@ -0,0 +1,37 @@ +import { getMatchPrioritizingPrimary } from '../utils'; + +const MOCK_MATCHED_FIELDS = [ + { + name: 'fieldPaths', + value: 'rain', + }, + { + name: 'description', + value: 'rainbow', + }, + { + name: 'fieldPaths', + value: 'rainbow', + }, + { + name: 'fieldPaths', + value: 'rainbows', + }, +]; + +describe('utils', () => { + describe('getMatchPrioritizingPrimary', () => { + it('prioritizes exact match', () => { + global.window.location.search = 'query=rainbow'; + const match = getMatchPrioritizingPrimary(MOCK_MATCHED_FIELDS, 'fieldPaths'); + expect(match?.value).toEqual('rainbow'); + expect(match?.name).toEqual('fieldPaths'); + }); + it('will accept first contains match', () => { + global.window.location.search = 'query=bow'; + 
const match = getMatchPrioritizingPrimary(MOCK_MATCHED_FIELDS, 'fieldPaths'); + expect(match?.value).toEqual('rainbow'); + expect(match?.name).toEqual('fieldPaths'); + }); + }); +}); diff --git a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx index 7bd19066758b88..79c64bde34c47f 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/DeprecationPill.tsx @@ -1,11 +1,13 @@ -import { InfoCircleOutlined } from '@ant-design/icons'; -import { Divider, Popover, Tooltip, Typography } from 'antd'; import React from 'react'; +import { InfoCircleOutlined } from '@ant-design/icons'; +import { Divider, message, Modal, Popover, Tooltip, Typography } from 'antd'; +import { blue } from '@ant-design/colors'; import styled from 'styled-components'; import moment from 'moment'; import { Deprecation } from '../../../../../types.generated'; import { getLocaleTimezone } from '../../../../shared/time/timeUtils'; import { ANTD_GRAY } from '../../constants'; +import { useBatchUpdateDeprecationMutation } from '../../../../../graphql/mutations.generated'; const DeprecatedContainer = styled.div` width: 104px; @@ -55,12 +57,30 @@ const StyledInfoCircleOutlined = styled(InfoCircleOutlined)` color: #ef5b5b; `; +const UndeprecatedIcon = styled(InfoCircleOutlined)` + font-size: 14px; + padding-right: 6px; +`; + +const IconGroup = styled.div` + font-size: 12px; + color: 'black'; + &:hover { + color: ${blue[4]}; + cursor: pointer; + } +`; + type Props = { + urn: string; deprecation: Deprecation; preview?: boolean | null; + refetch?: () => void; + showUndeprecate: boolean | null; }; -export const DeprecationPill = ({ deprecation, preview }: Props) => { +export const DeprecationPill = ({ deprecation, preview, urn, refetch, showUndeprecate }: Props) => { + const [batchUpdateDeprecationMutation] = 
useBatchUpdateDeprecationMutation(); /** * Deprecation Decommission Timestamp */ @@ -78,6 +98,30 @@ export const DeprecationPill = ({ deprecation, preview }: Props) => { const hasDetails = deprecation.note !== '' || deprecation.decommissionTime !== null; const isDividerNeeded = deprecation.note !== '' && deprecation.decommissionTime !== null; + const batchUndeprecate = () => { + batchUpdateDeprecationMutation({ + variables: { + input: { + resources: [{ resourceUrn: urn }], + deprecated: false, + }, + }, + }) + .then(({ errors }) => { + if (!errors) { + message.success({ content: 'Marked assets as un-deprecated!', duration: 2 }); + refetch?.(); + } + }) + .catch((e) => { + message.destroy(); + message.error({ + content: `Failed to mark assets as un-deprecated: \n ${e.message || ''}`, + duration: 3, + }); + }); + }; + return ( { )} + {isDividerNeeded && } + {showUndeprecate && ( + + Modal.confirm({ + title: `Confirm Mark as un-deprecated`, + content: `Are you sure you want to mark this asset as un-deprecated?`, + onOk() { + batchUndeprecate(); + }, + onCancel() {}, + okText: 'Yes', + maskClosable: true, + closable: true, + }) + } + > + + Mark as un-deprecated + + )} ) : ( 'No additional details' diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx index f0c6cb5dca6519..74792d179baffa 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearch.tsx @@ -2,7 +2,7 @@ import React, { useState, useEffect } from 'react'; import styled from 'styled-components'; import { ApolloError } from '@apollo/client'; import { EntityType, FacetFilterInput } from '../../../../../../types.generated'; -import { ENTITY_FILTER_NAME } from '../../../../../search/utils/constants'; +import { ENTITY_FILTER_NAME, UnionType } from 
'../../../../../search/utils/constants'; import { SearchCfg } from '../../../../../../conf'; import { EmbeddedListSearchResults } from './EmbeddedListSearchResults'; import EmbeddedListSearchHeader from './EmbeddedListSearchHeader'; @@ -11,6 +11,7 @@ import { GetSearchResultsParams, SearchResultsInterface } from './types'; import { isListSubset } from '../../../utils'; import { EntityAndType } from '../../../types'; import { Message } from '../../../../../shared/Message'; +import { generateOrFilters } from '../../../../../search/utils/generateOrFilters'; const Container = styled.div` display: flex; @@ -48,10 +49,12 @@ export const addFixedQuery = (baseQuery: string, fixedQuery: string, emptyQuery: type Props = { query: string; page: number; + unionType: UnionType; filters: FacetFilterInput[]; onChangeQuery: (query) => void; onChangeFilters: (filters) => void; onChangePage: (page) => void; + onChangeUnionType: (unionType: UnionType) => void; emptySearchQuery?: string | null; fixedFilter?: FacetFilterInput | null; fixedQuery?: string | null; @@ -72,9 +75,11 @@ export const EmbeddedListSearch = ({ query, filters, page, + unionType, onChangeQuery, onChangeFilters, onChangePage, + onChangeUnionType, emptySearchQuery, fixedFilter, fixedQuery, @@ -95,7 +100,7 @@ export const EmbeddedListSearch = ({ const finalFilters = (fixedFilter && [...filtersWithoutEntities, fixedFilter]) || filtersWithoutEntities; const entityFilters: Array = filters .filter((filter) => filter.field === ENTITY_FILTER_NAME) - .map((filter) => filter.value.toUpperCase() as EntityType); + .flatMap((filter) => filter.values.map((value) => value?.toUpperCase() as EntityType)); const [showFilters, setShowFilters] = useState(defaultShowFilters || false); const [isSelectMode, setIsSelectMode] = useState(false); @@ -109,7 +114,8 @@ export const EmbeddedListSearch = ({ query: finalQuery, start: (page - 1) * SearchCfg.RESULTS_PER_PAGE, count: SearchCfg.RESULTS_PER_PAGE, - filters: finalFilters, + filters: [], + 
orFilters: generateOrFilters(unionType, finalFilters), }, }, skip: true, @@ -126,7 +132,8 @@ export const EmbeddedListSearch = ({ query: finalQuery, start: (page - 1) * numResultsPerPage, count: numResultsPerPage, - filters: finalFilters, + filters: [], + orFilters: generateOrFilters(unionType, finalFilters), }, }, }); @@ -200,12 +207,14 @@ export const EmbeddedListSearch = ({ searchBarInputStyle={searchBarInputStyle} /> (''); const [page, setPage] = useState(1); + const [unionType, setUnionType] = useState(UnionType.AND); + const [filters, setFilters] = useState>([]); const onChangeQuery = (q: string) => { @@ -70,9 +73,11 @@ export const EmbeddedListSearchModal = ({ query={query} filters={filters} page={page} + unionType={unionType} onChangeQuery={onChangeQuery} onChangeFilters={onChangeFilters} onChangePage={onChangePage} + onChangeUnionType={setUnionType} emptySearchQuery={emptySearchQuery} fixedFilter={fixedFilter} fixedQuery={fixedQuery} diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx index 6fc3bc3bd9e984..d70385bf2b243e 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchResults.tsx @@ -2,11 +2,12 @@ import React from 'react'; import { Pagination, Typography } from 'antd'; import styled from 'styled-components'; import { FacetFilterInput, FacetMetadata, SearchResults as SearchResultType } from '../../../../../../types.generated'; -import { SearchFilters } from '../../../../../search/SearchFilters'; import { SearchCfg } from '../../../../../../conf'; import { EntityNameList } from '../../../../../recommendations/renderer/component/EntityNameList'; import { ReactComponent as LoadingSvg } from '../../../../../../images/datahub-logo-color-loading_pendulum.svg'; 
import { EntityAndType } from '../../../types'; +import { UnionType } from '../../../../../search/utils/constants'; +import { SearchFiltersSection } from '../../../../../search/SearchFiltersSection'; const SearchBody = styled.div` height: 100%; @@ -44,33 +45,11 @@ const PaginationInfoContainer = styled.span` align-items: center; `; -const FiltersHeader = styled.div` - font-size: 14px; - font-weight: 600; - flex: 0 0 auto; - - padding-left: 20px; - padding-right: 20px; - padding-bottom: 8px; - - width: 100%; - height: 46px; - line-height: 46px; - border-bottom: 1px solid; - border-color: ${(props) => props.theme.styles['border-color-base']}; -`; - const StyledPagination = styled(Pagination)` margin: 0px; padding: 0px; `; -const SearchFilterContainer = styled.div` - padding-top: 10px; - flex: 1 1 auto; - overflow: hidden; -`; - const LoadingContainer = styled.div` padding-top: 40px; padding-bottom: 40px; @@ -86,8 +65,10 @@ interface Props { selectedFilters: Array; loading: boolean; showFilters?: boolean; + unionType: UnionType; onChangeFilters: (filters: Array) => void; onChangePage: (page: number) => void; + onChangeUnionType: (unionType: UnionType) => void; isSelectMode: boolean; selectedEntities: EntityAndType[]; setSelectedEntities: (entities: EntityAndType[]) => any; @@ -102,6 +83,8 @@ export const EmbeddedListSearchResults = ({ selectedFilters, loading, showFilters, + unionType, + onChangeUnionType, onChangeFilters, onChangePage, isSelectMode, @@ -120,15 +103,14 @@ export const EmbeddedListSearchResults = ({ {!!showFilters && ( - Filter - - onChangeFilters(newFilters)} - /> - + )} diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchSection.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchSection.tsx index c813b97ecfed57..8e8f5b6b16a08f 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchSection.tsx +++ 
b/datahub-web-react/src/app/entity/shared/components/styled/search/EmbeddedListSearchSection.tsx @@ -8,6 +8,7 @@ import { navigateToEntitySearchUrl } from './navigateToEntitySearchUrl'; import { GetSearchResultsParams, SearchResultsInterface } from './types'; import { useEntityQueryParams } from '../../../containers/profile/utils'; import { EmbeddedListSearch } from './EmbeddedListSearch'; +import { UnionType } from '../../../../../search/utils/constants'; type Props = { emptySearchQuery?: string | null; @@ -44,6 +45,8 @@ export const EmbeddedListSearchSection = ({ const params = QueryString.parse(location.search, { arrayFormat: 'comma' }); const query: string = params?.query as string; const page: number = params.page && Number(params.page as string) > 0 ? Number(params.page as string) : 1; + const unionType: UnionType = Number(params.unionType as any as UnionType) || UnionType.AND; + const filters: Array = useFilters(params); const onSearch = (q: string) => { @@ -54,6 +57,7 @@ export const EmbeddedListSearchSection = ({ page: 1, filters, history, + unionType, }); }; @@ -65,6 +69,7 @@ export const EmbeddedListSearchSection = ({ page: 1, filters: newFilters, history, + unionType, }); }; @@ -76,6 +81,19 @@ export const EmbeddedListSearchSection = ({ page: newPage, filters, history, + unionType, + }); + }; + + const onChangeUnionType = (newUnionType: UnionType) => { + navigateToEntitySearchUrl({ + baseUrl: location.pathname, + baseParams, + query, + page, + filters, + history, + unionType: newUnionType, }); }; @@ -83,10 +101,12 @@ export const EmbeddedListSearchSection = ({ (''); const [page, setPage] = useState(1); const [filters, setFilters] = useState>([]); + const [unionType, setUnionType] = useState(UnionType.AND); const [showFilters, setShowFilters] = useState(false); const [numResultsPerPage, setNumResultsPerPage] = useState(SearchCfg.RESULTS_PER_PAGE); @@ -70,7 +71,7 @@ export const SearchSelect = ({ fixedEntityTypes, placeholderText, selectedEntiti ); const 
entityFilters: Array = filters .filter((filter) => filter.field === ENTITY_FILTER_NAME) - .map((filter) => filter.value.toUpperCase() as EntityType); + .flatMap((filter) => filter.values.map((value) => value.toUpperCase() as EntityType)); const finalEntityTypes = (entityFilters.length > 0 && entityFilters) || fixedEntityTypes || []; // Execute search @@ -166,9 +167,11 @@ export const SearchSelect = ({ fixedEntityTypes, placeholderText, selectedEntiti loading={loading} searchResponse={searchAcrossEntities} filters={facets} + unionType={unionType} selectedFilters={filters} onChangeFilters={onChangeFilters} onChangePage={onChangePage} + onChangeUnionType={setUnionType} page={page} showFilters={showFilters} numResultsPerPage={numResultsPerPage} diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/action/DeprecationDropdown.tsx b/datahub-web-react/src/app/entity/shared/components/styled/search/action/DeprecationDropdown.tsx index 9e4c22e1748273..43e44eee421191 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/action/DeprecationDropdown.tsx +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/action/DeprecationDropdown.tsx @@ -26,13 +26,16 @@ export default function DeprecationDropdown({ urns, disabled = false, refetch }: }) .then(({ errors }) => { if (!errors) { - message.success({ content: 'Marked assets as undeprecated!', duration: 2 }); + message.success({ content: 'Marked assets as un-deprecated!', duration: 2 }); refetch?.(); } }) .catch((e) => { message.destroy(); - message.error({ content: `Failed to mark assets as undeprecated: \n ${e.message || ''}`, duration: 3 }); + message.error({ + content: `Failed to mark assets as un-deprecated: \n ${e.message || ''}`, + duration: 3, + }); }); }; @@ -48,11 +51,11 @@ export default function DeprecationDropdown({ urns, disabled = false, refetch }: }, }, { - title: 'Mark as undeprecated', + title: 'Mark as un-deprecated', onClick: () => { Modal.confirm({ 
- title: `Confirm Mark as undeprecated`, - content: `Are you sure you want to mark these assets as undeprecated?`, + title: `Confirm Mark as un-deprecated`, + content: `Are you sure you want to mark these assets as un-deprecated?`, onOk() { batchUndeprecate(); }, diff --git a/datahub-web-react/src/app/entity/shared/components/styled/search/navigateToEntitySearchUrl.ts b/datahub-web-react/src/app/entity/shared/components/styled/search/navigateToEntitySearchUrl.ts index e0f59c4f7fa2ac..20ede4f2ae502e 100644 --- a/datahub-web-react/src/app/entity/shared/components/styled/search/navigateToEntitySearchUrl.ts +++ b/datahub-web-react/src/app/entity/shared/components/styled/search/navigateToEntitySearchUrl.ts @@ -2,6 +2,7 @@ import { RouteComponentProps } from 'react-router'; import * as QueryString from 'query-string'; import { EntityType, FacetFilterInput } from '../../../../../../types.generated'; import filtersToQueryStringParams from '../../../../../search/utils/filtersToQueryStringParams'; +import { UnionType } from '../../../../../search/utils/constants'; export const navigateToEntitySearchUrl = ({ baseUrl, @@ -11,6 +12,7 @@ export const navigateToEntitySearchUrl = ({ page: newPage = 1, filters: newFilters, history, + unionType, }: { baseUrl: string; baseParams: Record; @@ -19,10 +21,11 @@ export const navigateToEntitySearchUrl = ({ page?: number; filters?: Array; history: RouteComponentProps['history']; + unionType: UnionType; }) => { const constructedFilters = newFilters || []; if (newType) { - constructedFilters.push({ field: 'entity', value: newType }); + constructedFilters.push({ field: 'entity', values: [newType] }); } const search = QueryString.stringify( @@ -30,6 +33,7 @@ export const navigateToEntitySearchUrl = ({ ...filtersToQueryStringParams(constructedFilters), query: newQuery, page: newPage, + unionType, ...baseParams, }, { arrayFormat: 'comma' }, diff --git a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx 
b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx index 81f83e6c55ab3f..740386fac25536 100644 --- a/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx +++ b/datahub-web-react/src/app/entity/shared/containers/profile/header/EntityHeader.tsx @@ -125,7 +125,13 @@ export const EntityHeader = ({ {entityData?.deprecation?.deprecated && ( - + )} {entityData?.health?.map((health) => ( ; filterText?: string; }; + +const EMPTY_SET: Set = new Set(); + export default function SchemaTable({ rows, schemaMetadata, @@ -56,7 +59,7 @@ export default function SchemaTable({ editMode = true, schemaFieldBlameList, showSchemaAuditView, - expandedRowsFromFilter = new Set(), + expandedRowsFromFilter = EMPTY_SET, filterText = '', }: Props): JSX.Element { const hasUsageStats = useMemo(() => (usageStats?.aggregations?.fields?.length || 0) > 0, [usageStats]); diff --git a/datahub-web-react/src/app/entity/shared/tabs/Lineage/ImpactAnalysis.tsx b/datahub-web-react/src/app/entity/shared/tabs/Lineage/ImpactAnalysis.tsx index 5993df25bbef67..e5d65f99336311 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Lineage/ImpactAnalysis.tsx +++ b/datahub-web-react/src/app/entity/shared/tabs/Lineage/ImpactAnalysis.tsx @@ -33,7 +33,7 @@ export const ImpactAnalysis = ({ urn, direction }: Props) => { ); const entityFilters: Array = filters .filter((filter) => filter.field === ENTITY_FILTER_NAME) - .map((filter) => filter.value.toUpperCase() as EntityType); + .flatMap((filter) => filter.values.map((value) => value.toUpperCase() as EntityType)); const { data, loading } = useSearchAcrossLineageQuery({ variables: { @@ -67,7 +67,7 @@ export const ImpactAnalysis = ({ urn, direction }: Props) => { direction, })} defaultShowFilters - defaultFilters={[{ field: 'degree', value: '1' }]} + defaultFilters={[{ field: 'degree', values: ['1'] }]} /> ); diff --git 
a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts index 695ced5e0fcf6e..1dfb3fef380fa4 100644 --- a/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts +++ b/datahub-web-react/src/app/entity/shared/tabs/Lineage/generateUseSearchResultsViaRelationshipHook.ts @@ -12,7 +12,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ return function useGetSearchResultsViaSearchAcrossLineage(params: GetSearchResultsParams) { const { variables: { - input: { types, query, start, count, filters }, + input: { types, query, start, count, filters, orFilters }, }, } = params; @@ -26,6 +26,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ start, count, filters, + orFilters, }, }, }); @@ -42,6 +43,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ start: refetchStart, count: refetchCount, filters: refetchFilters, + orFilters: refetchOrFilters, }, } = refetchParams; return refetch({ @@ -53,6 +55,7 @@ export default function generateUseSearchResultsViaRelationshipHook({ start: refetchStart, count: refetchCount, filters: refetchFilters, + orFilters: refetchOrFilters, }, }).then((res) => res.data.searchAcrossLineage); }, diff --git a/datahub-web-react/src/app/entity/shared/types.ts b/datahub-web-react/src/app/entity/shared/types.ts index 1003c69ec2d4ba..2791550ad76778 100644 --- a/datahub-web-react/src/app/entity/shared/types.ts +++ b/datahub-web-react/src/app/entity/shared/types.ts @@ -31,6 +31,7 @@ import { CustomPropertiesEntry, DomainAssociation, InputFields, + FineGrainedLineage, } from '../../../types.generated'; import { FetchedEntity } from '../../lineage/types'; @@ -97,6 +98,7 @@ export type GenericEntityProperties = { siblingPlatforms?: Maybe; lastIngested?: Maybe; inputFields?: Maybe; + fineGrainedLineages?: 
Maybe; }; export type GenericEntityUpdate = { diff --git a/datahub-web-react/src/app/entity/shared/utils.ts b/datahub-web-react/src/app/entity/shared/utils.ts index 7f6afc6fafaab6..32307d6ea4900a 100644 --- a/datahub-web-react/src/app/entity/shared/utils.ts +++ b/datahub-web-react/src/app/entity/shared/utils.ts @@ -1,3 +1,5 @@ +import * as QueryString from 'query-string'; + import { MatchedField } from '../../../types.generated'; import { FIELDS_TO_HIGHLIGHT } from '../dataset/search/highlights'; import { GenericEntityProperties } from './types'; @@ -83,14 +85,42 @@ export const isListSubset = (l1, l2): boolean => { return l1.every((result) => l2.indexOf(result) >= 0); }; +function normalize(value: string) { + return value.trim().toLowerCase(); +} + +function fromQueryGetBestMatch(selectedMatchedFields: MatchedField[], rawQuery: string) { + const query = normalize(rawQuery); + // first lets see if there's an exact match between a field value and the query + const exactMatch = selectedMatchedFields.find((field) => normalize(field.value) === query); + if (exactMatch) { + return exactMatch; + } + + // if no exact match exists, we'll see if the entire query is contained in any of the values + const containedMatch = selectedMatchedFields.find((field) => normalize(field.value).includes(query)); + if (containedMatch) { + return containedMatch; + } + + // otherwise, just return whichever is first + return selectedMatchedFields[0]; +} + export const getMatchPrioritizingPrimary = ( matchedFields: MatchedField[], primaryField: string, ): MatchedField | undefined => { - const primaryMatch = matchedFields.find((field) => field.name === primaryField); - if (primaryMatch) { - return primaryMatch; + const { location } = window; + const params = QueryString.parse(location.search, { arrayFormat: 'comma' }); + const query: string = decodeURIComponent(params.query ? 
(params.query as string) : ''); + + const primaryMatches = matchedFields.filter((field) => field.name === primaryField); + if (primaryMatches.length > 0) { + return fromQueryGetBestMatch(primaryMatches, query); } - return matchedFields.find((field) => FIELDS_TO_HIGHLIGHT.has(field.name)); + const matchesThatShouldBeShownOnFE = matchedFields.filter((field) => FIELDS_TO_HIGHLIGHT.has(field.name)); + + return fromQueryGetBestMatch(matchesThatShouldBeShownOnFE, query); }; diff --git a/datahub-web-react/src/app/entity/user/UserAssets.tsx b/datahub-web-react/src/app/entity/user/UserAssets.tsx index 8a8d4b569871e5..bd1f0b738fcda8 100644 --- a/datahub-web-react/src/app/entity/user/UserAssets.tsx +++ b/datahub-web-react/src/app/entity/user/UserAssets.tsx @@ -15,7 +15,7 @@ export const UserAssets = ({ urn }: Props) => { return ( diff --git a/datahub-web-react/src/app/home/HomePageHeader.tsx b/datahub-web-react/src/app/home/HomePageHeader.tsx index daf7c305bb67dd..cce62f9a2a3d63 100644 --- a/datahub-web-react/src/app/home/HomePageHeader.tsx +++ b/datahub-web-react/src/app/home/HomePageHeader.tsx @@ -166,6 +166,7 @@ export const HomePageHeader = () => { start: 0, count: 6, filters: [], + orFilters: [], }, }, }); diff --git a/datahub-web-react/src/app/home/HomePagePosts.tsx b/datahub-web-react/src/app/home/HomePagePosts.tsx new file mode 100644 index 00000000000000..97ef147f8dad62 --- /dev/null +++ b/datahub-web-react/src/app/home/HomePagePosts.tsx @@ -0,0 +1,62 @@ +import React from 'react'; +import { Divider, Typography } from 'antd'; +import styled from 'styled-components'; +import { useListPostsQuery } from '../../graphql/post.generated'; +import { Post, PostContentType } from '../../types.generated'; +import { PostTextCard } from '../search/PostTextCard'; +import { PostLinkCard } from '../search/PostLinkCard'; + +const RecommendationContainer = styled.div` + margin-bottom: 32px; + max-width: 1000px; + min-width: 750px; +`; + +const RecommendationTitle = 
styled(Typography.Title)` + margin-top: 0px; + margin-bottom: 0px; + padding: 0px; +`; + +const ThinDivider = styled(Divider)` + margin-top: 12px; + margin-bottom: 12px; +`; + +const LinkPostsContainer = styled.div` + display: flex; + flex-direction: row; +`; + +export const HomePagePosts = () => { + const { data: postsData } = useListPostsQuery({ + variables: { + input: { + start: 0, + count: 10, + }, + }, + }); + + const textPosts = + postsData?.listPosts?.posts?.filter((post) => post?.content?.contentType === PostContentType.Text) || []; + const linkPosts = + postsData?.listPosts?.posts?.filter((post) => post?.content?.contentType === PostContentType.Link) || []; + const hasPosts = textPosts.length > 0 || linkPosts.length > 0; + return hasPosts ? ( + + Pinned + + {textPosts.map((post) => ( + + ))} + + {linkPosts.map((post, index) => ( + + ))} + + + ) : ( + <> + ); +}; diff --git a/datahub-web-react/src/app/home/HomePageRecommendations.tsx b/datahub-web-react/src/app/home/HomePageRecommendations.tsx index ee65736efce09e..61e3d9a5b3e36a 100644 --- a/datahub-web-react/src/app/home/HomePageRecommendations.tsx +++ b/datahub-web-react/src/app/home/HomePageRecommendations.tsx @@ -1,5 +1,5 @@ import React, { useEffect, useState } from 'react'; -import styled from 'styled-components'; +import styled from 'styled-components/macro'; import { Button, Divider, Empty, Typography } from 'antd'; import { RocketOutlined } from '@ant-design/icons'; import { @@ -16,6 +16,7 @@ import { useGetEntityCountsQuery } from '../../graphql/app.generated'; import { GettingStartedModal } from './GettingStartedModal'; import { ANTD_GRAY } from '../entity/shared/constants'; import { useGetAuthenticatedUser } from '../useGetAuthenticatedUser'; +import { HomePagePosts } from './HomePagePosts'; const RecommendationsContainer = styled.div` margin-top: 32px; @@ -139,6 +140,7 @@ export const HomePageRecommendations = ({ userUrn }: Props) => { return ( + {orderedEntityCounts && 
orderedEntityCounts.length > 0 && ( {domainRecommendationModule && ( diff --git a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx index 5763209e6edf43..92bdce6d52ac66 100644 --- a/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx +++ b/datahub-web-react/src/app/ingest/secret/SecretBuilderModal.tsx @@ -41,6 +41,11 @@ export const SecretBuilderModal = ({ initialState, visible, onSubmit, onCancel } querySelectorToExecuteClick: '#createSecretButton', }); + function resetValues() { + setSecretBuilderState({}); + form.resetFields(); + } + return ( + )} + + )} + + + + ); +} diff --git a/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx b/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx index 5e9404d0c57be9..6a6079375445e9 100644 --- a/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx +++ b/datahub-web-react/src/app/lineage/__tests__/LineageTree.test.tsx @@ -37,6 +37,7 @@ describe('LineageTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -46,6 +47,12 @@ describe('LineageTree', () => { ); const downstreamData = constructTree( + { entity: dataset3WithLineage, type: EntityType.Dataset }, + mockFetchedEntities, + Direction.Downstream, + testEntityRegistry, + ); + const upstreamData = constructTree( { entity: dataset3WithLineage, type: EntityType.Dataset }, mockFetchedEntities, Direction.Upstream, @@ -66,7 +73,8 @@ describe('LineageTree', () => { {(zoom) => ( { setDraggedNodes={jest.fn()} onEntityCenter={jest.fn()} setHoveredEntity={jest.fn()} + fetchedEntities={mockFetchedEntities} /> )} diff --git a/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx b/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx index 22e38e24c86f2f..1778992ce33b6f 100644 --- 
a/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx +++ b/datahub-web-react/src/app/lineage/__tests__/adjustVXTreeLayout.test.tsx @@ -30,6 +30,7 @@ describe('adjustVXTreeLayout', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -78,6 +79,7 @@ describe('adjustVXTreeLayout', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -131,6 +133,7 @@ describe('adjustVXTreeLayout', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -173,6 +176,7 @@ describe('adjustVXTreeLayout', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -215,6 +219,7 @@ describe('adjustVXTreeLayout', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, diff --git a/datahub-web-react/src/app/lineage/__tests__/constructTree.test.ts b/datahub-web-react/src/app/lineage/__tests__/constructTree.test.ts index 158a47f6435fce..c647fe33df9b6c 100644 --- a/datahub-web-react/src/app/lineage/__tests__/constructTree.test.ts +++ b/datahub-web-react/src/app/lineage/__tests__/constructTree.test.ts @@ -39,6 +39,7 @@ describe('constructTree', () => { children: [], icon: undefined, platform: kafkaPlatform, + schemaMetadata: dataset3.schemaMetadata, }); }); @@ -50,6 +51,7 @@ describe('constructTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: 
EntityType.Dataset }, @@ -65,7 +67,7 @@ describe('constructTree', () => { Direction.Downstream, testEntityRegistry, ), - ).toEqual({ + ).toMatchObject({ name: 'Display Name of Sixth', expandedName: 'Fully Qualified Name of Sixth Test Dataset', urn: 'urn:li:dataset:6', @@ -73,6 +75,7 @@ describe('constructTree', () => { unexploredChildren: 0, icon: undefined, platform: kafkaPlatform, + schemaMetadata: dataset6WithLineage.schemaMetadata, children: [ { name: 'Fourth Test Dataset', @@ -98,6 +101,7 @@ describe('constructTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -113,7 +117,7 @@ describe('constructTree', () => { Direction.Upstream, testEntityRegistry, ), - ).toEqual({ + ).toMatchObject({ name: 'Display Name of Sixth', expandedName: 'Fully Qualified Name of Sixth Test Dataset', urn: 'urn:li:dataset:6', @@ -121,6 +125,7 @@ describe('constructTree', () => { unexploredChildren: 0, icon: undefined, platform: kafkaPlatform, + schemaMetadata: dataset6WithLineage.schemaMetadata, children: [ { countercurrentChildrenUrns: [], @@ -147,6 +152,7 @@ describe('constructTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -162,7 +168,7 @@ describe('constructTree', () => { Direction.Upstream, testEntityRegistry, ), - ).toEqual({ + ).toMatchObject({ name: 'Yet Another Dataset', expandedName: 'Yet Another Dataset', urn: 'urn:li:dataset:3', @@ -170,6 +176,7 @@ describe('constructTree', () => { unexploredChildren: 0, icon: undefined, platform: kafkaPlatform, + schemaMetadata: dataset3WithLineage.schemaMetadata, children: [ { name: 'Fourth Test Dataset', @@ -238,6 +245,7 @@ describe('constructTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, 
testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -266,6 +274,7 @@ describe('constructTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: EntityType.Dataset }, @@ -280,7 +289,7 @@ describe('constructTree', () => { Direction.Upstream, testEntityRegistry, ), - ).toEqual({ + ).toMatchObject({ name: 'Yet Another Dataset', expandedName: 'Yet Another Dataset', urn: 'urn:li:dataset:3', @@ -288,6 +297,7 @@ describe('constructTree', () => { unexploredChildren: 0, icon: undefined, platform: kafkaPlatform, + schemaMetadata: dataset3WithLineage.schemaMetadata, children: [ { name: 'Fourth Test Dataset', @@ -349,6 +359,7 @@ describe('constructTree', () => { const mockFetchedEntities = fetchedEntities.reduce( (acc, entry) => extendAsyncEntities( + {}, acc, testEntityRegistry, { entity: entry.entity, type: entry.entity.type }, @@ -372,10 +383,11 @@ describe('constructTree', () => { icon: undefined, platform: kafkaPlatform, subtype: undefined, + schemaMetadata: updatedDataset6WithLineage.schemaMetadata, children: [ { name: 'DataJobInfoName', - expandedName: undefined, + expandedName: 'DataFlowInfoName.DataJobInfoName', type: EntityType.DataJob, unexploredChildren: 0, urn: dataJob1.urn, diff --git a/datahub-web-react/src/app/lineage/constants.ts b/datahub-web-react/src/app/lineage/constants.ts index 41c2159f3c673d..66f956afb50019 100644 --- a/datahub-web-react/src/app/lineage/constants.ts +++ b/datahub-web-react/src/app/lineage/constants.ts @@ -5,5 +5,18 @@ export const INVERSE_RELATIONSHIPS = ['Produces', 'MemberOf']; export const HORIZONTAL_SPACE_PER_LAYER = 400; export const VERTICAL_SPACE_BETWEEN_NODES = 40; +export const EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT = 35; +export const NUM_COLUMNS_PER_PAGE = 10; +export const COLUMN_HEIGHT = 30; export const CURVE_PADDING = 75; + +export const width = 250; +export const height = 80; +export const 
iconWidth = 32; +export const iconHeight = 32; +export const iconX = -width / 2 + 22; +export const iconY = -iconHeight / 2; +export const centerX = -width / 2; +export const centerY = -height / 2; +export const textX = iconX + iconWidth + 8; diff --git a/datahub-web-react/src/app/lineage/types.ts b/datahub-web-react/src/app/lineage/types.ts index 410e36852495c7..947fabe160604f 100644 --- a/datahub-web-react/src/app/lineage/types.ts +++ b/datahub-web-react/src/app/lineage/types.ts @@ -13,6 +13,8 @@ import { Maybe, Status, DataPlatform, + FineGrainedLineage, + SchemaMetadata, } from '../../types.generated'; export type EntitySelectParams = { @@ -43,6 +45,8 @@ export type FetchedEntity = { platform?: DataPlatform; status?: Maybe; siblingPlatforms?: Maybe; + fineGrainedLineages?: [FineGrainedLineage]; + schemaMetadata?: SchemaMetadata; }; export type NodeData = { @@ -61,20 +65,31 @@ export type NodeData = { platform?: DataPlatform; status?: Maybe; siblingPlatforms?: Maybe; + schemaMetadata?: SchemaMetadata; }; export type VizNode = { x: number; y: number; data: NodeData; + direction: Direction; }; export type VizEdge = { source: VizNode; target: VizNode; + sourceField?: string; + targetField?: string; curve: { x: number; y: number }[]; }; +export type ColumnEdge = { + sourceUrn: string; + sourceField: string; + targetUrn: string; + targetField: string; +}; + export type FetchedEntities = { [x: string]: FetchedEntity }; export enum Direction { @@ -96,6 +111,7 @@ export type TreeProps = { onLineageExpand: (data: EntityAndType) => void; selectedEntity?: EntitySelectParams; hoveredEntity?: EntitySelectParams; + fineGrainedMap?: any; }; export type EntityAndType = diff --git a/datahub-web-react/src/app/lineage/utils/LineageExplorerContext.tsx b/datahub-web-react/src/app/lineage/utils/LineageExplorerContext.tsx index 3555b137f36949..c3bb4c73bb7ef8 100644 --- a/datahub-web-react/src/app/lineage/utils/LineageExplorerContext.tsx +++ 
b/datahub-web-react/src/app/lineage/utils/LineageExplorerContext.tsx @@ -1,9 +1,35 @@ import React from 'react'; +import { SchemaField, SchemaFieldRef } from '../../../types.generated'; +import { ColumnEdge } from '../types'; export const LineageExplorerContext = React.createContext({ expandTitles: false, + showColumns: false, + collapsedColumnsNodes: null, + setCollapsedColumnsNodes: null, + fineGrainedMap: { forward: [], reverse: [] }, + selectedField: null, + setSelectedField: () => {}, + highlightedEdges: [], + setHighlightedEdges: () => {}, + visibleColumnsByUrn: {}, + setVisibleColumnsByUrn: () => {}, + columnsByUrn: {}, + setColumnsByUrn: () => {}, }); type LineageExplorerContextType = { expandTitles: boolean; + showColumns: boolean; + collapsedColumnsNodes: any; + setCollapsedColumnsNodes: any; + fineGrainedMap: any; + selectedField: SchemaFieldRef | null; + setSelectedField: (field: SchemaFieldRef | null) => void; + highlightedEdges: ColumnEdge[]; + setHighlightedEdges: React.Dispatch>; + visibleColumnsByUrn: any; + setVisibleColumnsByUrn: any; + columnsByUrn: Record; + setColumnsByUrn: React.Dispatch>>; }; diff --git a/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts b/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts index 1f3834799f136e..307f98b6881027 100644 --- a/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts +++ b/datahub-web-react/src/app/lineage/utils/adjustVXTreeLayout.ts @@ -1,7 +1,7 @@ import { HierarchyPointNode } from '@vx/hierarchy/lib/types'; import { NodeData, Direction } from '../types'; // eslint-disable-next-line @typescript-eslint/no-unused-vars -import { width as nodeWidth } from '../LineageEntityNode'; +import { width as nodeWidth } from '../constants'; export default function adjustVXTreeLayout({ tree, diff --git a/datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts b/datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts new file mode 100644 index 
00000000000000..2cf257d684bc6a --- /dev/null +++ b/datahub-web-react/src/app/lineage/utils/columnLineageUtils.ts @@ -0,0 +1,92 @@ +import { ColumnEdge, FetchedEntity, NodeData } from '../types'; +import { SchemaField } from '../../../types.generated'; + +export function getHighlightedColumnsForNode(highlightedEdges: ColumnEdge[], fields: SchemaField[], nodeUrn: string) { + return highlightedEdges + .filter( + (edge) => + (edge.sourceUrn === nodeUrn && !!fields?.find((field) => field.fieldPath === edge.sourceField)) || + (edge.targetUrn === nodeUrn && !!fields?.find((field) => field.fieldPath === edge.targetField)), + ) + .map((edge) => { + if (edge.sourceUrn === nodeUrn) { + return edge.sourceField; + } + if (edge.targetUrn === nodeUrn) { + return edge.targetField; + } + return ''; + }); +} + +export function sortRelatedLineageColumns( + highlightedColumnsForNode: string[], + fields: SchemaField[], + nodeUrn: string, + columnsByUrn: Record, +) { + return { + ...columnsByUrn, + [nodeUrn || 'noop']: [...fields].sort( + (fieldA, fieldB) => + highlightedColumnsForNode.indexOf(fieldB.fieldPath) - + highlightedColumnsForNode.indexOf(fieldA.fieldPath), + ), + }; +} + +export function sortColumnsByDefault( + columnsByUrn: Record, + fields: SchemaField[], + nodeFields: SchemaField[], + nodeUrn: string, +) { + return { + ...columnsByUrn, + [nodeUrn || 'noop']: [...fields].sort( + (fieldA, fieldB) => + (nodeFields.findIndex((field) => field.fieldPath === fieldA.fieldPath) || 0) - + (nodeFields.findIndex((field) => field.fieldPath === fieldB.fieldPath) || 0), + ), + }; +} + +export function populateColumnsByUrn( + columnsByUrn: Record, + fetchedEntities: { [x: string]: FetchedEntity }, + setColumnsByUrn: (colsByUrn: Record) => void, +) { + let populatedColumnsByUrn = { ...columnsByUrn }; + Object.entries(fetchedEntities).forEach(([urn, fetchedEntity]) => { + if (fetchedEntity.schemaMetadata && !columnsByUrn[urn]) { + populatedColumnsByUrn = { ...populatedColumnsByUrn, [urn]: 
fetchedEntity.schemaMetadata.fields }; + } + }); + setColumnsByUrn(populatedColumnsByUrn); +} + +export function haveDisplayedFieldsChanged(displayedFields: SchemaField[], previousDisplayedFields?: SchemaField[]) { + if (!previousDisplayedFields) return true; + let hasChanged = false; + displayedFields.forEach((field, index) => { + if ( + previousDisplayedFields && + previousDisplayedFields[index] && + (previousDisplayedFields[index] as any).fieldPath !== field.fieldPath + ) { + hasChanged = true; + } + }); + return hasChanged; +} + +export function filterColumns( + filterText: string, + node: { x: number; y: number; data: Omit }, + setColumnsByUrn: (value: React.SetStateAction>) => void, +) { + const filteredFields = node.data.schemaMetadata?.fields.filter((field) => field.fieldPath.includes(filterText)); + if (filteredFields) { + setColumnsByUrn((colsByUrn) => ({ ...colsByUrn, [node.data.urn || 'noop']: filteredFields })); + } +} diff --git a/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts b/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts index d1571c01529ebc..ee90173a2b75df 100644 --- a/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts +++ b/datahub-web-react/src/app/lineage/utils/constructFetchedNode.ts @@ -62,6 +62,7 @@ export default function constructFetchedNode( platform: fetchedNode?.platform, status: fetchedNode.status, siblingPlatforms: fetchedNode.siblingPlatforms, + schemaMetadata: fetchedNode.schemaMetadata, }; // eslint-disable-next-line no-param-reassign diff --git a/datahub-web-react/src/app/lineage/utils/constructTree.ts b/datahub-web-react/src/app/lineage/utils/constructTree.ts index 4207ef6a0f64ab..371db4229659f0 100644 --- a/datahub-web-react/src/app/lineage/utils/constructTree.ts +++ b/datahub-web-react/src/app/lineage/utils/constructTree.ts @@ -23,6 +23,7 @@ export default function constructTree( platform: fetchedEntity?.platform, unexploredChildren: 0, siblingPlatforms: 
fetchedEntity?.siblingPlatforms, + schemaMetadata: fetchedEntity?.schemaMetadata, }; const lineageConfig = entityRegistry.getLineageVizConfig(entityAndType.type, entityAndType.entity); let children: EntityAndType[] = []; diff --git a/datahub-web-react/src/app/lineage/utils/extendAsyncEntities.ts b/datahub-web-react/src/app/lineage/utils/extendAsyncEntities.ts index ac28707833ee0d..79f8bc2c4324e6 100644 --- a/datahub-web-react/src/app/lineage/utils/extendAsyncEntities.ts +++ b/datahub-web-react/src/app/lineage/utils/extendAsyncEntities.ts @@ -1,7 +1,64 @@ +import { SchemaFieldRef } from '../../../types.generated'; import EntityRegistry from '../../entity/EntityRegistry'; -import { EntityAndType, FetchedEntities } from '../types'; +import { EntityAndType, FetchedEntities, FetchedEntity } from '../types'; + +const breakFieldUrn = (ref: SchemaFieldRef) => { + const before = ref.urn; + const after = ref.path; + + return [before, after]; +}; + +function updateFineGrainedMap( + fineGrainedMap: any, + upstreamEntityUrn: string, + upstreamField: string, + downstreamEntityUrn: string, + downstreamField: string, +) { + const mapForUrn = fineGrainedMap.forward[upstreamEntityUrn] || {}; + const mapForField = mapForUrn[upstreamField] || {}; + const listForDownstream = mapForField[downstreamEntityUrn] || []; + listForDownstream.push(downstreamField); + + // eslint-disable-next-line no-param-reassign + fineGrainedMap.forward[upstreamEntityUrn] = mapForUrn; + mapForUrn[upstreamField] = mapForField; + mapForField[downstreamEntityUrn] = listForDownstream; + + const mapForUrnReverse = fineGrainedMap.reverse[downstreamEntityUrn] || {}; + const mapForFieldReverse = mapForUrnReverse[downstreamField] || {}; + const listForDownstreamReverse = mapForFieldReverse[upstreamEntityUrn] || []; + listForDownstreamReverse.push(upstreamField); + + // eslint-disable-next-line no-param-reassign + fineGrainedMap.reverse[downstreamEntityUrn] = mapForUrnReverse; + mapForUrnReverse[downstreamField] = 
mapForFieldReverse; + mapForFieldReverse[upstreamEntityUrn] = listForDownstreamReverse; +} + +function extendColumnLineage(lineageVizConfig: FetchedEntity, fineGrainedMap: any) { + if (lineageVizConfig.fineGrainedLineages && lineageVizConfig.fineGrainedLineages.length > 0) { + lineageVizConfig.fineGrainedLineages.forEach((fineGrainedLineage) => { + fineGrainedLineage.upstreams?.forEach((upstream) => { + const [upstreamEntityUrn, upstreamField] = breakFieldUrn(upstream); + fineGrainedLineage.downstreams?.forEach((downstream) => { + const [downstreamEntityUrn, downstreamField] = breakFieldUrn(downstream); + updateFineGrainedMap( + fineGrainedMap, + upstreamEntityUrn, + upstreamField, + downstreamEntityUrn, + downstreamField, + ); + }); + }); + }); + } +} export default function extendAsyncEntities( + fineGrainedMap: any, fetchedEntities: FetchedEntities, entityRegistry: EntityRegistry, entityAndType: EntityAndType, @@ -15,6 +72,8 @@ export default function extendAsyncEntities( if (!lineageVizConfig) return fetchedEntities; + extendColumnLineage(lineageVizConfig, fineGrainedMap); + return { ...fetchedEntities, [entityAndType.entity.urn]: { diff --git a/datahub-web-react/src/app/lineage/utils/highlightColumnLineage.ts b/datahub-web-react/src/app/lineage/utils/highlightColumnLineage.ts new file mode 100644 index 00000000000000..7d7b0220ebfee8 --- /dev/null +++ b/datahub-web-react/src/app/lineage/utils/highlightColumnLineage.ts @@ -0,0 +1,51 @@ +import { ColumnEdge } from '../types'; + +function highlightDownstreamColumnLineage( + sourceField: string, + sourceUrn: string, + edges: ColumnEdge[], + fineGrainedMap: any, +) { + const forwardLineage = fineGrainedMap.forward[sourceUrn]?.[sourceField]; + if (forwardLineage) { + Object.entries(forwardLineage).forEach((entry) => { + const [targetUrn, fieldPaths] = entry; + (fieldPaths as string[]).forEach((targetField) => { + edges.push({ sourceUrn, sourceField, targetUrn, targetField }); + 
highlightDownstreamColumnLineage(targetField, targetUrn, edges, fineGrainedMap); + }); + }); + } +} + +function highlightUpstreamColumnLineage( + targetField: string, + targetUrn: string, + edges: ColumnEdge[], + fineGrainedMap: any, +) { + const reverseLineage = fineGrainedMap.reverse[targetUrn]?.[targetField]; + if (reverseLineage) { + Object.entries(reverseLineage).forEach((entry) => { + const [sourceUrn, fieldPaths] = entry; + (fieldPaths as string[]).forEach((sourceField) => { + edges.push({ targetUrn, targetField, sourceUrn, sourceField }); + highlightUpstreamColumnLineage(sourceField, sourceUrn, edges, fineGrainedMap); + }); + }); + } +} + +export function highlightColumnLineage( + fieldPath: string, + fineGrainedMap: any, + urn: string, + setHighlightedEdges: (edgesToHighlight: ColumnEdge[]) => void, +) { + const edgesToHighlight: ColumnEdge[] = []; + if (urn) { + highlightDownstreamColumnLineage(fieldPath, urn, edgesToHighlight, fineGrainedMap); + highlightUpstreamColumnLineage(fieldPath, urn, edgesToHighlight, fineGrainedMap); + } + setHighlightedEdges(edgesToHighlight); +} diff --git a/datahub-web-react/src/app/lineage/utils/layoutTree.ts b/datahub-web-react/src/app/lineage/utils/layoutTree.ts index 8f12c5069eb62b..c13d7f09563cec 100644 --- a/datahub-web-react/src/app/lineage/utils/layoutTree.ts +++ b/datahub-web-react/src/app/lineage/utils/layoutTree.ts @@ -1,7 +1,15 @@ -import { CURVE_PADDING, HORIZONTAL_SPACE_PER_LAYER, VERTICAL_SPACE_BETWEEN_NODES } from '../constants'; -import { width as nodeWidth } from '../LineageEntityNode'; +import { SchemaField } from '../../../types.generated'; +import { + COLUMN_HEIGHT, + CURVE_PADDING, + EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT, + HORIZONTAL_SPACE_PER_LAYER, + NUM_COLUMNS_PER_PAGE, + VERTICAL_SPACE_BETWEEN_NODES, + width as nodeWidth, +} from '../constants'; import { Direction, NodeData, VizEdge, VizNode } from '../types'; -import { nodeHeightFromTitleLength } from './nodeHeightFromTitleLength'; +import { 
getTitleHeight, nodeHeightFromTitleLength } from './titleUtils'; type ProcessArray = { parent: VizNode | null; @@ -11,29 +19,26 @@ type ProcessArray = { const INSIDE_NODE_SHIFT = nodeWidth / 2 - 19; const HEADER_HEIGHT = 125; +const UPSTREAM_X_MODIFIER = -1; +const UPSTREAM_DIRECTION_SHIFT = -20; +const COLUMN_HEIGHT_BUFFER = 1.2; -export default function layoutTree( +function layoutNodesForOneDirection( data: NodeData, direction: Direction, draggedNodes: Record, canvasHeight: number, expandTitles: boolean, -): { - nodesToRender: VizNode[]; - edgesToRender: VizEdge[]; - nodesByUrn: Record; - height: number; - layers: number; -} { - const nodesToRender: VizNode[] = []; - const edgesToRender: VizEdge[] = []; - let maxHeight = 0; - + showColumns: boolean, + collapsedColumnsNodes: any, + nodesToRender: VizNode[], + edgesToRender: VizEdge[], +) { const nodesByUrn: Record = {}; - const xModifier = direction === Direction.Downstream ? 1 : -1; - const directionShift = direction === Direction.Downstream ? 0 : -20; + const xModifier = direction === Direction.Downstream ? 1 : UPSTREAM_X_MODIFIER; + const directionShift = direction === Direction.Downstream ? 0 : UPSTREAM_DIRECTION_SHIFT; - let currentLayer = 0; + let numInCurrentLayer = 0; let nodesInCurrentLayer: ProcessArray = [{ parent: null, node: data }]; let nodesInNextLayer: ProcessArray = []; @@ -50,15 +55,13 @@ export default function layoutTree( const layerSize = filteredNodesInCurrentLayer.length; - const layerHeight = filteredNodesInCurrentLayer - .map(({ node }) => nodeHeightFromTitleLength(expandTitles ? 
node.expandedName || node.name : undefined)) - .reduce((acc, height) => acc + height, 0); - - maxHeight = Math.max(maxHeight, layerHeight); - // approximate the starting position assuming each node has a 1 line title (its ok to be a bit off here) let currentXPosition = - -((nodeHeightFromTitleLength(undefined) + VERTICAL_SPACE_BETWEEN_NODES) * (layerSize - 1)) / 2 + + -( + (nodeHeightFromTitleLength(undefined, undefined, showColumns, false) + VERTICAL_SPACE_BETWEEN_NODES) * + (layerSize - 1) + ) / + 2 + canvasHeight / 2 + HEADER_HEIGHT; @@ -80,15 +83,21 @@ export default function layoutTree( data: node, x: draggedNodes[node.urn].x, y: draggedNodes[node.urn].y, + direction, } : { data: node, x: currentXPosition, - y: HORIZONTAL_SPACE_PER_LAYER * currentLayer * xModifier, + y: HORIZONTAL_SPACE_PER_LAYER * numInCurrentLayer * xModifier, + direction, }; currentXPosition += - nodeHeightFromTitleLength(expandTitles ? node.expandedName || node.name : undefined) + - VERTICAL_SPACE_BETWEEN_NODES; + nodeHeightFromTitleLength( + expandTitles ? 
node.expandedName || node.name : undefined, + node.schemaMetadata, + showColumns, + !!collapsedColumnsNodes[node?.urn || 'no-op'], // avoid indexing on undefined if node is undefined + ) + VERTICAL_SPACE_BETWEEN_NODES; nodesByUrn[node.urn] = vizNodeForNode; nodesToRender.push(vizNodeForNode); @@ -142,8 +151,243 @@ export default function layoutTree( nodesInCurrentLayer = nodesInNextLayer; nodesInNextLayer = []; - currentLayer++; + numInCurrentLayer++; + } + return { numInCurrentLayer, nodesByUrn }; +} + +interface DrawColumnEdgeProps { + targetNode?: VizNode; + currentNode?: VizNode; + targetField: string; + targetFields: SchemaField[]; + targetTitleHeight: number; + collapsedColumnsNodes: any; + sourceFieldX: number; + sourceFieldY: number; + edgesToRender: VizEdge[]; + sourceField: string; + entityUrn: string; + targetUrn: string; + visibleColumnsByUrn: Record>; +} + +function drawColumnEdge({ + targetNode, + currentNode, + targetField, + targetFields, + targetTitleHeight, + collapsedColumnsNodes, + sourceFieldX, + sourceFieldY, + edgesToRender, + sourceField, + entityUrn, + targetUrn, + visibleColumnsByUrn, +}: DrawColumnEdgeProps) { + const targetFieldIndex = targetFields.findIndex((candidate) => candidate.fieldPath === targetField) || 0; + const targetFieldY = targetNode?.y || 0 + 1; + let targetFieldX = (targetNode?.x || 0) + 35 + targetTitleHeight; + if (!collapsedColumnsNodes[targetNode?.data.urn || 'no-op']) { + if (!visibleColumnsByUrn[targetUrn]?.has(targetField)) { + targetFieldX = + (targetNode?.x || 0) + + targetTitleHeight + + EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT + + (NUM_COLUMNS_PER_PAGE + COLUMN_HEIGHT_BUFFER) * COLUMN_HEIGHT + + 1; + } else { + targetFieldX = + (targetNode?.x || 0) + + targetTitleHeight + + EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT + + ((targetFieldIndex % NUM_COLUMNS_PER_PAGE) + COLUMN_HEIGHT_BUFFER) * COLUMN_HEIGHT + + 1; + } + } + + if (currentNode && targetNode && sourceFieldX && sourceFieldY && targetFieldX && targetFieldY) { + 
const curve = [ + { + x: sourceFieldX, + y: sourceFieldY - INSIDE_NODE_SHIFT * UPSTREAM_X_MODIFIER + UPSTREAM_DIRECTION_SHIFT, + }, + { + x: sourceFieldX, + y: sourceFieldY - (INSIDE_NODE_SHIFT + CURVE_PADDING) * UPSTREAM_X_MODIFIER, + }, + { + x: targetFieldX, + y: targetFieldY + (nodeWidth / 2 + CURVE_PADDING) * UPSTREAM_X_MODIFIER, + }, + { + x: targetFieldX, + y: targetFieldY + (nodeWidth / 2 - 15) * UPSTREAM_X_MODIFIER + UPSTREAM_DIRECTION_SHIFT, + }, + ]; + + const vizEdgeForPair = { + source: currentNode, + target: targetNode, + sourceField, + targetField, + curve, + }; + + if ( + !edgesToRender.find( + (edge) => + edge.source.data.urn === entityUrn && + edge.sourceField === sourceField && + edge.target.data.urn === targetUrn && + edge.targetField === targetField, + ) + ) { + edgesToRender.push(vizEdgeForPair); + } + } +} + +function layoutColumnTree( + fineGrainedMap: any, + showColumns: boolean, + nodesToRender: VizNode[], + expandTitles: boolean, + collapsedColumnsNodes: any, + edgesToRender: VizEdge[], + visibleColumnsByUrn: Record>, + columnsByUrn: Record, +) { + const forwardEdges = fineGrainedMap.forward; + if (showColumns) { + Object.keys(forwardEdges).forEach((entityUrn) => { + const fieldPathToEdges = forwardEdges[entityUrn]; + Object.keys(fieldPathToEdges).forEach((sourceField) => { + const fieldForwardEdges = fieldPathToEdges[sourceField]; + + const currentNode = nodesToRender.find((node) => node.data.urn === entityUrn); + const fields = columnsByUrn[currentNode?.data.urn || ''] || []; + const fieldIndex = fields.findIndex((candidate) => candidate.fieldPath === sourceField) || 0; + + const sourceTitleHeight = getTitleHeight( + expandTitles ? 
currentNode?.data.expandedName || currentNode?.data.name : undefined, + ); + + const sourceFieldY = currentNode?.y || 0 + 1; + let sourceFieldX = (currentNode?.x || 0) + 30 + sourceTitleHeight; + if (!collapsedColumnsNodes[currentNode?.data.urn || 'no-op']) { + if (!visibleColumnsByUrn[entityUrn]?.has(sourceField)) { + sourceFieldX = + (currentNode?.x || 0) + + sourceTitleHeight + + EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT + + (NUM_COLUMNS_PER_PAGE + COLUMN_HEIGHT_BUFFER) * COLUMN_HEIGHT + + 1; + } else { + sourceFieldX = + (currentNode?.x || 0) + + sourceTitleHeight + + EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT + + ((fieldIndex % NUM_COLUMNS_PER_PAGE) + COLUMN_HEIGHT_BUFFER) * COLUMN_HEIGHT + + 1; + } + } + + Object.keys(fieldForwardEdges || {}).forEach((targetUrn) => { + const targetNode = nodesToRender.find((node) => node.data.urn === targetUrn); + const targetFields = columnsByUrn[targetNode?.data.urn || ''] || []; + const targetTitleHeight = getTitleHeight( + expandTitles ? targetNode?.data.expandedName || targetNode?.data.name : undefined, + ); + + (fieldForwardEdges[targetUrn] || []).forEach((targetField) => { + if ( + (visibleColumnsByUrn[entityUrn]?.has(sourceField) || + visibleColumnsByUrn[targetUrn]?.has(targetField)) && + targetFields.find((field) => field.fieldPath === targetField) && + fields.find((field) => field.fieldPath === sourceField) + ) { + drawColumnEdge({ + targetNode, + currentNode, + targetField, + targetFields, + targetTitleHeight, + collapsedColumnsNodes, + sourceFieldX, + sourceFieldY, + edgesToRender, + sourceField, + entityUrn, + targetUrn, + visibleColumnsByUrn, + }); + } + }); + }); + }); + }); } +} + +export default function layoutTree( + upstreamData: NodeData, + downstreamData: NodeData, + draggedNodes: Record, + canvasHeight: number, + expandTitles: boolean, + showColumns: boolean, + collapsedColumnsNodes: any, + fineGrainedMap: any, + visibleColumnsByUrn: Record>, + columnsByUrn: Record, +): { + nodesToRender: VizNode[]; + 
edgesToRender: VizEdge[]; + nodesByUrn: Record; + layers: number; +} { + const nodesToRender: VizNode[] = []; + const edgesToRender: VizEdge[] = []; + + const { numInCurrentLayer: numUpstream, nodesByUrn: upstreamNodesByUrn } = layoutNodesForOneDirection( + upstreamData, + Direction.Upstream, + draggedNodes, + canvasHeight, + expandTitles, + showColumns, + collapsedColumnsNodes, + nodesToRender, + edgesToRender, + ); + + const { numInCurrentLayer: numDownstream, nodesByUrn: downstreamNodesByUrn } = layoutNodesForOneDirection( + downstreamData, + Direction.Downstream, + draggedNodes, + canvasHeight, + expandTitles, + showColumns, + collapsedColumnsNodes, + nodesToRender, + edgesToRender, + ); + + const nodesByUrn = { ...upstreamNodesByUrn, ...downstreamNodesByUrn }; + + layoutColumnTree( + fineGrainedMap, + showColumns, + nodesToRender, + expandTitles, + collapsedColumnsNodes, + edgesToRender, + visibleColumnsByUrn, + columnsByUrn, + ); - return { nodesToRender, edgesToRender, height: maxHeight, layers: currentLayer - 1, nodesByUrn }; + return { nodesToRender, edgesToRender, layers: numUpstream + numDownstream - 1, nodesByUrn }; } diff --git a/datahub-web-react/src/app/lineage/utils/navigateToLineageUrl.ts b/datahub-web-react/src/app/lineage/utils/navigateToLineageUrl.ts index 92c2964b9b6f03..0cb5af03a650c9 100644 --- a/datahub-web-react/src/app/lineage/utils/navigateToLineageUrl.ts +++ b/datahub-web-react/src/app/lineage/utils/navigateToLineageUrl.ts @@ -1,12 +1,14 @@ import * as QueryString from 'query-string'; import { RouteComponentProps } from 'react-router-dom'; import { SEPARATE_SIBLINGS_URL_PARAM } from '../../entity/shared/siblingUtils'; +import { SHOW_COLUMNS_URL_PARAMS } from './useIsShowColumnsMode'; export const navigateToLineageUrl = ({ location, history, isLineageMode, isHideSiblingMode, + showColumns, }: { location: { search: string; @@ -15,6 +17,7 @@ export const navigateToLineageUrl = ({ history: RouteComponentProps['history']; isLineageMode: 
boolean; isHideSiblingMode?: boolean; + showColumns?: boolean; }) => { const parsedSearch = QueryString.parse(location.search, { arrayFormat: 'comma' }); let newSearch: any = { @@ -27,6 +30,12 @@ export const navigateToLineageUrl = ({ [SEPARATE_SIBLINGS_URL_PARAM]: isHideSiblingMode, }; } + if (showColumns !== undefined) { + newSearch = { + ...newSearch, + [SHOW_COLUMNS_URL_PARAMS]: showColumns, + }; + } const newSearchStringified = QueryString.stringify(newSearch, { arrayFormat: 'comma' }); history.push({ diff --git a/datahub-web-react/src/app/lineage/utils/nodeHeightFromTitleLength.ts b/datahub-web-react/src/app/lineage/utils/titleUtils.ts similarity index 55% rename from datahub-web-react/src/app/lineage/utils/nodeHeightFromTitleLength.ts rename to datahub-web-react/src/app/lineage/utils/titleUtils.ts index 837477644838d7..cee2243b3eedb7 100644 --- a/datahub-web-react/src/app/lineage/utils/nodeHeightFromTitleLength.ts +++ b/datahub-web-react/src/app/lineage/utils/titleUtils.ts @@ -1,3 +1,6 @@ +import { SchemaMetadata } from '../../../types.generated'; +import { COLUMN_HEIGHT, EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT, NUM_COLUMNS_PER_PAGE } from '../constants'; + interface OptionalOptions { font?: string; fontSize?: string; @@ -23,6 +26,7 @@ interface Size { const HEIGHT_WITHOUT_TEXT_HEIGHT = 66; const DEFAULT_TEXT_TO_GET_NON_ZERO_HEIGHT = 'a'; +export const NODE_WIDTH_WITHOUT_TITLE = 61; function createDummyElement(text: string, options: Options): HTMLElement { const element = document.createElement('div'); @@ -83,6 +87,55 @@ const calcualteSize = (text: string, options: OptionalOptions = {}): Size => { return size; }; -export function nodeHeightFromTitleLength(title?: string) { - return Math.floor(calcualteSize(title || DEFAULT_TEXT_TO_GET_NON_ZERO_HEIGHT).height) + HEIGHT_WITHOUT_TEXT_HEIGHT; +export function getTitleHeight(title?: string) { + return Math.floor(calcualteSize(title || DEFAULT_TEXT_TO_GET_NON_ZERO_HEIGHT).height); +} + +export function 
nodeHeightFromTitleLength( + title?: string, + schemaMetadata?: SchemaMetadata, + showColumns?: boolean, + collapsed?: boolean, +) { + let showColumnBuffer = 0; + let columnPaginationBuffer = 0; + if (showColumns && schemaMetadata) { + if (!collapsed) { + showColumnBuffer = + Math.min(schemaMetadata.fields.length, NUM_COLUMNS_PER_PAGE) * COLUMN_HEIGHT + + EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT; + if (schemaMetadata.fields.length > NUM_COLUMNS_PER_PAGE) { + columnPaginationBuffer = 40; + } + } else { + showColumnBuffer = EXPAND_COLLAPSE_COLUMNS_TOGGLE_HEIGHT; + } + } + return getTitleHeight(title) + HEIGHT_WITHOUT_TEXT_HEIGHT + showColumnBuffer + columnPaginationBuffer; +} + +function truncate(input, length) { + if (!input) return ''; + if (input.length > length) { + return `${input.substring(0, length)}...`; + } + return input; +} + +function getLastTokenOfTitle(title?: string): string { + if (!title) return ''; + + const lastToken = title?.split('.').slice(-1)[0]; + + // if the last token does not contain any content, the string should not be tokenized on `.` + if (lastToken.replace(/\s/g, '').length === 0) { + return title; + } + + return lastToken; +} + +export function getShortenedTitle(title: string, nodeWidth: number) { + const titleWidth = nodeWidth - NODE_WIDTH_WITHOUT_TITLE; + return truncate(getLastTokenOfTitle(title), Math.ceil(titleWidth / 10)); } diff --git a/datahub-web-react/src/app/lineage/utils/useIsShowColumnsMode.ts b/datahub-web-react/src/app/lineage/utils/useIsShowColumnsMode.ts new file mode 100644 index 00000000000000..6819910edddc55 --- /dev/null +++ b/datahub-web-react/src/app/lineage/utils/useIsShowColumnsMode.ts @@ -0,0 +1,11 @@ +import * as QueryString from 'query-string'; +import { useLocation } from 'react-router-dom'; + +export const SHOW_COLUMNS_URL_PARAMS = 'show_columns'; + +export function useIsShowColumnsMode() { + const location = useLocation(); + const params = QueryString.parse(location.search, { arrayFormat: 'comma' }); + + 
return params[SHOW_COLUMNS_URL_PARAMS] === 'true'; +} diff --git a/datahub-web-react/src/app/lineage/utils/useResetPageIndexAfterSelect.ts b/datahub-web-react/src/app/lineage/utils/useResetPageIndexAfterSelect.ts new file mode 100644 index 00000000000000..795f0183504fdd --- /dev/null +++ b/datahub-web-react/src/app/lineage/utils/useResetPageIndexAfterSelect.ts @@ -0,0 +1,32 @@ +import { useContext, useEffect } from 'react'; +import { SchemaField } from '../../../types.generated'; +import usePrevious from '../../shared/usePrevious'; +import { NUM_COLUMNS_PER_PAGE } from '../constants'; +import { getHighlightedColumnsForNode } from './columnLineageUtils'; +import { LineageExplorerContext } from './LineageExplorerContext'; + +export function useResetPageIndexAfterSelect( + nodeUrn: string, + fields: SchemaField[], + setPageIndex: (pageIndex: number) => void, +) { + const { selectedField, highlightedEdges } = useContext(LineageExplorerContext); + const previousSelectedField = usePrevious(selectedField); + + useEffect(() => { + // all of this logic is to determine if we've reordered this node's fields when clicking a selected field somewhere + if ( + selectedField && + previousSelectedField !== selectedField && + selectedField.urn !== nodeUrn && + fields.length >= NUM_COLUMNS_PER_PAGE + ) { + const highlightedColumnsForNode = getHighlightedColumnsForNode(highlightedEdges, fields, nodeUrn || ''); + + if (highlightedColumnsForNode.length > 0) { + // at this point we know this node's columns have been reordered, set them on first page + setPageIndex(0); + } + } + }, [selectedField, previousSelectedField, nodeUrn, fields, highlightedEdges, setPageIndex]); +} diff --git a/datahub-web-react/src/app/lineage/utils/useSortColumnsBySelectedField.ts b/datahub-web-react/src/app/lineage/utils/useSortColumnsBySelectedField.ts new file mode 100644 index 00000000000000..b9ced8cabebbec --- /dev/null +++ b/datahub-web-react/src/app/lineage/utils/useSortColumnsBySelectedField.ts @@ -0,0 
+1,46 @@ +import { useContext, useEffect } from 'react'; +import usePrevious from '../../shared/usePrevious'; +import { NUM_COLUMNS_PER_PAGE } from '../constants'; +import { FetchedEntity } from '../types'; +import { getHighlightedColumnsForNode, sortColumnsByDefault, sortRelatedLineageColumns } from './columnLineageUtils'; +import { LineageExplorerContext } from './LineageExplorerContext'; + +export default function useSortColumnsBySelectedField(fetchedEntities: { [x: string]: FetchedEntity }) { + const { highlightedEdges, selectedField, columnsByUrn, setColumnsByUrn } = useContext(LineageExplorerContext); + const previousSelectedField = usePrevious(selectedField); + + useEffect(() => { + let updatedColumnsByUrn = { ...columnsByUrn }; + + if (selectedField && previousSelectedField !== selectedField) { + Object.entries(columnsByUrn).forEach(([urn, columns]) => { + if (selectedField.urn !== urn && columns.length >= NUM_COLUMNS_PER_PAGE) { + const highlightedColumnsForNode = getHighlightedColumnsForNode(highlightedEdges, columns, urn); + + if (highlightedColumnsForNode.length > 0) { + updatedColumnsByUrn = sortRelatedLineageColumns( + highlightedColumnsForNode, + columns, + urn, + updatedColumnsByUrn, + ); + } + } + }); + setColumnsByUrn(updatedColumnsByUrn); + } else if (!selectedField && previousSelectedField !== selectedField) { + Object.entries(columnsByUrn).forEach(([urn, columns]) => { + const fetchedEntity = fetchedEntities[urn]; + if (fetchedEntity && fetchedEntity.schemaMetadata) { + updatedColumnsByUrn = sortColumnsByDefault( + updatedColumnsByUrn, + columns, + fetchedEntity.schemaMetadata.fields, + urn, + ); + } + }); + setColumnsByUrn(updatedColumnsByUrn); + } + }, [selectedField, previousSelectedField, highlightedEdges, columnsByUrn, fetchedEntities, setColumnsByUrn]); +} diff --git a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx index 731f5cb53b8cdf..ca313df209f028 100644 --- 
a/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx +++ b/datahub-web-react/src/app/preview/DefaultPreviewCard.tsx @@ -288,7 +288,9 @@ export default function DefaultPreviewCard({ )} - {deprecation?.deprecated && } + {deprecation?.deprecated && ( + + )} {externalUrl && ( diff --git a/datahub-web-react/src/app/recommendations/renderer/component/GlossaryTermSearchList.tsx b/datahub-web-react/src/app/recommendations/renderer/component/GlossaryTermSearchList.tsx index 2ee11c46d522de..4ccb2fc8eb0234 100644 --- a/datahub-web-react/src/app/recommendations/renderer/component/GlossaryTermSearchList.tsx +++ b/datahub-web-react/src/app/recommendations/renderer/component/GlossaryTermSearchList.tsx @@ -53,7 +53,7 @@ export const GlossaryTermSearchList = ({ content, onClick }: Props) => { filters: [ { field: 'glossaryTerms', - value: term.urn, + values: [term.urn], }, ], history, diff --git a/datahub-web-react/src/app/recommendations/renderer/component/TagSearchList.tsx b/datahub-web-react/src/app/recommendations/renderer/component/TagSearchList.tsx index 91b3ab65121f41..18a82a8595ceda 100644 --- a/datahub-web-react/src/app/recommendations/renderer/component/TagSearchList.tsx +++ b/datahub-web-react/src/app/recommendations/renderer/component/TagSearchList.tsx @@ -44,7 +44,7 @@ export const TagSearchList = ({ content, onClick }: Props) => { filters: [ { field: 'tags', - value: tag.urn, + values: [tag.urn], }, ], history, diff --git a/datahub-web-react/src/app/search/AdvancedSearchAddFilterSelect.tsx b/datahub-web-react/src/app/search/AdvancedSearchAddFilterSelect.tsx new file mode 100644 index 00000000000000..3f6e679e62866c --- /dev/null +++ b/datahub-web-react/src/app/search/AdvancedSearchAddFilterSelect.tsx @@ -0,0 +1,53 @@ +import { Select } from 'antd'; +import * as React from 'react'; +import styled from 'styled-components'; +import { PlusOutlined } from '@ant-design/icons'; + +import { FacetFilterInput } from '../../types.generated'; +import { FIELD_TO_LABEL } from 
'./utils/constants'; + +const StyledPlus = styled(PlusOutlined)` + margin-right: 6px; +`; + +interface Props { + selectedFilters: Array; + onFilterFieldSelect: (value) => void; +} + +const { Option } = Select; + +export const AdvancedSearchAddFilterSelect = ({ selectedFilters, onFilterFieldSelect }: Props) => { + return ( + + ); +}; diff --git a/datahub-web-react/src/app/search/AdvancedSearchFilter.tsx b/datahub-web-react/src/app/search/AdvancedSearchFilter.tsx index a4162defb33fa2..ebf0e585a4fea8 100644 --- a/datahub-web-react/src/app/search/AdvancedSearchFilter.tsx +++ b/datahub-web-react/src/app/search/AdvancedSearchFilter.tsx @@ -14,6 +14,7 @@ type Props = { filter: FacetFilterInput; onClose: () => void; onUpdate: (newValue: FacetFilterInput) => void; + loading: boolean; }; const FilterContainer = styled.div` @@ -46,7 +47,7 @@ const FilterFieldLabel = styled.span` margin-right: 2px; `; -export const AdvancedSearchFilter = ({ facet, filter, onClose, onUpdate }: Props) => { +export const AdvancedSearchFilter = ({ facet, filter, onClose, onUpdate, loading }: Props) => { const [isEditing, setIsEditing] = useState(false); return ( <> @@ -73,7 +74,7 @@ export const AdvancedSearchFilter = ({ facet, filter, onClose, onUpdate }: Props - + {!loading && } {isEditing && ( { const newFilter: FacetFilterInput = { field: filter.field, - value: '', values: values as string[], condition: filter.condition, negated: filter.negated, diff --git a/datahub-web-react/src/app/search/AdvancedSearchFilterOverallUnionTypeSelect.tsx b/datahub-web-react/src/app/search/AdvancedSearchFilterOverallUnionTypeSelect.tsx new file mode 100644 index 00000000000000..d74a4c2bc3229b --- /dev/null +++ b/datahub-web-react/src/app/search/AdvancedSearchFilterOverallUnionTypeSelect.tsx @@ -0,0 +1,44 @@ +import { Select } from 'antd'; +import React from 'react'; +import styled from 'styled-components/macro'; + +import { ANTD_GRAY } from '../entity/shared/constants'; +import { UnionType } from 
'./utils/constants'; + +type Props = { + unionType: UnionType; + onUpdate: (newValue: UnionType) => void; +}; + +const { Option } = Select; + +const StyledSelect = styled(Select)` + border-radius: 5px; + background: ${ANTD_GRAY[4]}; + :hover { + background: ${ANTD_GRAY[4.5]}; + } +`; + +export const AdvancedSearchFilterOverallUnionTypeSelect = ({ unionType, onUpdate }: Props) => { + return ( + <> + { + if ((newValue as any) !== unionType) { + onUpdate(newValue as any); + } + }} + size="small" + dropdownMatchSelectWidth={false} + > + + + + + ); +}; diff --git a/datahub-web-react/src/app/search/AdvancedSearchFilterValuesSection.tsx b/datahub-web-react/src/app/search/AdvancedSearchFilterValuesSection.tsx index f08eacc5b8b4ed..eaa656338331d4 100644 --- a/datahub-web-react/src/app/search/AdvancedSearchFilterValuesSection.tsx +++ b/datahub-web-react/src/app/search/AdvancedSearchFilterValuesSection.tsx @@ -30,7 +30,7 @@ export const AdvancedSearchFilterValuesSection = ({ facet, filter }: Props) => { return ( - + ); })} diff --git a/datahub-web-react/src/app/search/AdvancedSearchFilters.tsx b/datahub-web-react/src/app/search/AdvancedSearchFilters.tsx new file mode 100644 index 00000000000000..f4e70e1b9007d4 --- /dev/null +++ b/datahub-web-react/src/app/search/AdvancedSearchFilters.tsx @@ -0,0 +1,126 @@ +import * as React from 'react'; +import { useState } from 'react'; +import styled from 'styled-components'; + +import { FacetFilterInput, FacetMetadata, FilterOperator } from '../../types.generated'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import { AdvancedSearchFilter } from './AdvancedSearchFilter'; +import { AdvancedSearchFilterOverallUnionTypeSelect } from './AdvancedSearchFilterOverallUnionTypeSelect'; +import { AdvancedFilterSelectValueModal } from './AdvancedFilterSelectValueModal'; +import { FIELDS_THAT_USE_CONTAINS_OPERATOR, UnionType } from './utils/constants'; +import { AdvancedSearchAddFilterSelect } from './AdvancedSearchAddFilterSelect'; + 
+export const SearchFilterWrapper = styled.div` + min-height: 100%; + overflow: auto; + margin-top: 6px; + margin-left: 12px; + margin-right: 12px; + + &::-webkit-scrollbar { + height: 12px; + width: 1px; + background: #f2f2f2; + } + &::-webkit-scrollbar-thumb { + background: #cccccc; + -webkit-border-radius: 1ex; + -webkit-box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.75); + } +`; + +const AnyAllSection = styled.div` + padding: 6px; + color: ${ANTD_GRAY[8]}; +`; + +const EmptyStateSection = styled.div` + border-radius: 5px; + background-color: ${ANTD_GRAY[2]}; + padding: 22px; + margin-top: 10px; +`; + +interface Props { + selectedFilters: Array; + facets: Array; + onFilterSelect: (newFilters: Array) => void; + onChangeUnionType: (unionType: UnionType) => void; + unionType?: UnionType; + loading: boolean; +} + +export const AdvancedSearchFilters = ({ + unionType = UnionType.AND, + facets, + selectedFilters, + onFilterSelect, + onChangeUnionType, + loading, +}: Props) => { + const [filterField, setFilterField] = useState(null); + + const onFilterFieldSelect = (value) => { + setFilterField(value.value); + }; + + const onSelectValueFromModal = (values) => { + if (!filterField) return; + + const newFilter: FacetFilterInput = { + field: filterField, + values: values as string[], + condition: FIELDS_THAT_USE_CONTAINS_OPERATOR.includes(filterField) + ? 
FilterOperator.Contain + : FilterOperator.Equal, + }; + onFilterSelect([...selectedFilters, newFilter]); + }; + + return ( + + + {selectedFilters?.length >= 2 && ( + + Show results that match{' '} + onChangeUnionType(newValue)} + /> + + )} + {selectedFilters.map((filter) => ( + facet.field === filter.field) || facets[0]} + loading={loading} + filter={filter} + onClose={() => { + onFilterSelect(selectedFilters.filter((f) => f !== filter)); + }} + onUpdate={(newValue) => { + onFilterSelect( + selectedFilters.map((f) => { + if (f === filter) { + return newValue; + } + return f; + }), + ); + }} + /> + ))} + {filterField && ( + facet.field === filterField) || null} + onCloseModal={() => setFilterField(null)} + filterField={filterField} + onSelect={onSelectValueFromModal} + /> + )} + {selectedFilters?.length === 0 && No filters applied, add one above.} + + ); +}; diff --git a/datahub-web-react/src/app/search/EditTextModal.tsx b/datahub-web-react/src/app/search/EditTextModal.tsx index dd69547604b610..43580d2e2b497e 100644 --- a/datahub-web-react/src/app/search/EditTextModal.tsx +++ b/datahub-web-react/src/app/search/EditTextModal.tsx @@ -21,13 +21,17 @@ export const EditTextModal = ({ defaultValue, onCloseModal, onOk, title }: Props - } > - setStagedValue(e.target.value)} value={stagedValue} /> + setStagedValue(e.target.value)} value={stagedValue} /> ); }; diff --git a/datahub-web-react/src/app/search/PostLinkCard.tsx b/datahub-web-react/src/app/search/PostLinkCard.tsx new file mode 100644 index 00000000000000..5e780ccefb1f14 --- /dev/null +++ b/datahub-web-react/src/app/search/PostLinkCard.tsx @@ -0,0 +1,94 @@ +import React from 'react'; +// import { Link } from 'react-router-dom'; +import { Button, Image, Typography } from 'antd'; +import { ArrowRightOutlined } from '@ant-design/icons'; +import styled from 'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import { Post } from '../../types.generated'; + +const CardContainer = 
styled(Button)<{ isLastCardInRow?: boolean }>` + display: flex; + flex-direction: row; + justify-content: space-between; + margin-right: ${(props) => (props.isLastCardInRow ? '0%' : '4%')}; + margin-left: 12px; + margin-bottom: 12px; + width: 29%; + height: 100px; + border: 1px solid ${ANTD_GRAY[4]}; + border-radius: 12px; + box-shadow: ${(props) => props.theme.styles['box-shadow']}; + &&:hover { + box-shadow: ${(props) => props.theme.styles['box-shadow-hover']}; + } + white-space: unset; +`; + +const LogoContainer = styled.div` + margin-top: 25px; + margin-left: 25px; + margin-right: 40px; +`; + +const PlatformLogo = styled(Image)` + width: auto; + object-fit: contain; + background-color: transparent; +`; + +const TextContainer = styled.div` + display: flex; + flex: 1; + justify-content: center; + align-items: start; + flex-direction: column; +`; + +const HeaderText = styled(Typography.Text)` + line-height: 10px; + margin-top: 12px; +`; + +const TitleDiv = styled.div` + display: flex; + justify-content: space-evenly; + align-items: center; + gap: 6px; + font-size: 14px; +`; + +const Title = styled(Typography.Title)` + word-break: break-word; +`; + +const NUM_CARDS_PER_ROW = 3; + +type Props = { + linkPost: Post; + index: number; +}; + +export const PostLinkCard = ({ linkPost, index }: Props) => { + const hasMedia = !!linkPost?.content?.media?.location; + const link = linkPost?.content?.link || ''; + const isLastCardInRow = (index + 1) % NUM_CARDS_PER_ROW === 0; + + return ( + + {hasMedia && ( + + + + )} + + Link + + <TitleDiv> + {linkPost?.content?.title} + <ArrowRightOutlined /> + </TitleDiv> + + + + ); +}; diff --git a/datahub-web-react/src/app/search/PostTextCard.tsx b/datahub-web-react/src/app/search/PostTextCard.tsx new file mode 100644 index 00000000000000..11079f2a379f49 --- /dev/null +++ b/datahub-web-react/src/app/search/PostTextCard.tsx @@ -0,0 +1,65 @@ +import React from 'react'; +import { Typography } from 'antd'; +import styled from 
'styled-components/macro'; +import { ANTD_GRAY } from '../entity/shared/constants'; +import { Post } from '../../types.generated'; + +const CardContainer = styled.div` + display: flex; + flex-direction: row; + margin-right: 12px; + margin-left: 12px; + margin-bottom: 12px; + height: 140px; + border: 1px solid ${ANTD_GRAY[4]}; + border-radius: 12px; + box-shadow: ${(props) => props.theme.styles['box-shadow']}; + &&:hover { + box-shadow: ${(props) => props.theme.styles['box-shadow-hover']}; + } + white-space: unset; +`; + +const TextContainer = styled.div` + margin-left: 12px; + display: flex; + justify-content: center; + align-items: start; + flex-direction: column; +`; + +const Title = styled(Typography.Title)` + word-break: break-word; +`; + +const HeaderText = styled(Typography.Text)` + margin-top: 12px; +`; + +const AnnouncementText = styled(Typography.Paragraph)` + font-size: 12px; + color: ${ANTD_GRAY[7]}; +`; + +type Props = { + textPost: Post; +}; + +export const PostTextCard = ({ textPost }: Props) => { + return ( + + + Announcement + + {textPost?.content?.title} + + {textPost?.content?.description} + + + ); +}; diff --git a/datahub-web-react/src/app/search/SearchFiltersSection.tsx b/datahub-web-react/src/app/search/SearchFiltersSection.tsx new file mode 100644 index 00000000000000..5fddd4d81f75d9 --- /dev/null +++ b/datahub-web-react/src/app/search/SearchFiltersSection.tsx @@ -0,0 +1,99 @@ +import { Button } from 'antd'; +import React, { useState } from 'react'; +import styled from 'styled-components/macro'; +import { FacetFilterInput, FacetMetadata } from '../../types.generated'; +import { UnionType } from './utils/constants'; +import { hasAdvancedFilters } from './utils/hasAdvancedFilters'; +import { AdvancedSearchFilters } from './AdvancedSearchFilters'; +import { SimpleSearchFilters } from './SimpleSearchFilters'; + +type Props = { + filters?: Array | null; + selectedFilters: Array; + unionType: UnionType; + loading: boolean; + onChangeFilters: 
(filters: Array) => void; + onChangeUnionType: (unionType: UnionType) => void; +}; + +const FiltersContainer = styled.div` + display: block; + max-width: 260px; + min-width: 260px; + overflow-wrap: break-word; + border-right: 1px solid; + border-color: ${(props) => props.theme.styles['border-color-base']}; + max-height: 100%; +`; + +const FiltersHeader = styled.div` + font-size: 14px; + font-weight: 600; + + padding-left: 20px; + padding-right: 20px; + padding-bottom: 8px; + + width: 100%; + height: 47px; + line-height: 47px; + border-bottom: 1px solid; + border-color: ${(props) => props.theme.styles['border-color-base']}; + + justify-content: space-between; + display: flex; +`; + +const SearchFilterContainer = styled.div` + padding-top: 10px; +`; + +// This component renders the entire filters section that allows toggling +// between the simplified search experience and advanced search +export const SearchFiltersSection = ({ + filters, + selectedFilters, + unionType, + loading, + onChangeFilters, + onChangeUnionType, +}: Props) => { + const onlyShowAdvancedFilters = hasAdvancedFilters(selectedFilters, unionType); + + const [seeAdvancedFilters, setSeeAdvancedFilters] = useState(onlyShowAdvancedFilters); + return ( + + + Filter + + + + + {seeAdvancedFilters ? 
( + onChangeFilters(newFilters)} + onChangeUnionType={onChangeUnionType} + facets={filters || []} + loading={loading} + /> + ) : ( + + onChangeFilters(newFilters)} + /> + + )} + + ); +}; diff --git a/datahub-web-react/src/app/search/SearchPage.tsx b/datahub-web-react/src/app/search/SearchPage.tsx index 0edefb0847ca6c..fd646e715b3255 100644 --- a/datahub-web-react/src/app/search/SearchPage.tsx +++ b/datahub-web-react/src/app/search/SearchPage.tsx @@ -9,10 +9,11 @@ import { SearchResults } from './SearchResults'; import analytics, { EventType } from '../analytics'; import { useGetSearchResultsForMultipleQuery } from '../../graphql/search.generated'; import { SearchCfg } from '../../conf'; -import { ENTITY_FILTER_NAME } from './utils/constants'; +import { ENTITY_FILTER_NAME, UnionType } from './utils/constants'; import { GetSearchResultsParams } from '../entity/shared/components/styled/search/types'; import { EntityAndType } from '../entity/shared/types'; import { scrollToTop } from '../shared/searchUtils'; +import { generateOrFilters } from './utils/generateOrFilters'; type SearchPageParams = { type?: string; @@ -30,13 +31,15 @@ export const SearchPage = () => { const query: string = decodeURIComponent(params.query ? (params.query as string) : ''); const activeType = entityRegistry.getTypeOrDefaultFromPathName(useParams().type || '', undefined); const page: number = params.page && Number(params.page as string) > 0 ? 
Number(params.page as string) : 1; + const unionType: UnionType = Number(params.unionType as any as UnionType) || UnionType.AND; + const filters: Array = useFilters(params); const filtersWithoutEntities: Array = filters.filter( (filter) => filter.field !== ENTITY_FILTER_NAME, ); const entityFilters: Array = filters .filter((filter) => filter.field === ENTITY_FILTER_NAME) - .map((filter) => filter.value.toUpperCase() as EntityType); + .flatMap((filter) => filter.values.map((value) => value?.toUpperCase() as EntityType)); const [numResultsPerPage, setNumResultsPerPage] = useState(SearchCfg.RESULTS_PER_PAGE); const [isSelectMode, setIsSelectMode] = useState(false); @@ -54,7 +57,8 @@ export const SearchPage = () => { query, start: (page - 1) * numResultsPerPage, count: numResultsPerPage, - filters: filtersWithoutEntities, + filters: [], + orFilters: generateOrFilters(unionType, filtersWithoutEntities), }, }, }); @@ -75,7 +79,8 @@ export const SearchPage = () => { query, start: (page - 1) * SearchCfg.RESULTS_PER_PAGE, count: SearchCfg.RESULTS_PER_PAGE, - filters: filtersWithoutEntities, + filters: [], + orFilters: generateOrFilters(unionType, filtersWithoutEntities), }, }, }); @@ -85,12 +90,16 @@ export const SearchPage = () => { }; const onChangeFilters = (newFilters: Array) => { - navigateToSearchUrl({ type: activeType, query, page: 1, filters: newFilters, history }); + navigateToSearchUrl({ type: activeType, query, page: 1, filters: newFilters, history, unionType }); + }; + + const onChangeUnionType = (newUnionType: UnionType) => { + navigateToSearchUrl({ type: activeType, query, page: 1, filters, history, unionType: newUnionType }); }; const onChangePage = (newPage: number) => { scrollToTop(); - navigateToSearchUrl({ type: activeType, query, page: newPage, filters, history }); + navigateToSearchUrl({ type: activeType, query, page: newPage, filters, history, unionType }); }; /** @@ -139,6 +148,7 @@ export const SearchPage = () => { return ( <> { 
selectedFilters={filters} loading={loading} onChangeFilters={onChangeFilters} + onChangeUnionType={onChangeUnionType} onChangePage={onChangePage} numResultsPerPage={numResultsPerPage} setNumResultsPerPage={setNumResultsPerPage} diff --git a/datahub-web-react/src/app/search/SearchResults.tsx b/datahub-web-react/src/app/search/SearchResults.tsx index f034c4fac9472b..3198b7caf054fa 100644 --- a/datahub-web-react/src/app/search/SearchResults.tsx +++ b/datahub-web-react/src/app/search/SearchResults.tsx @@ -10,7 +10,6 @@ import { MatchedField, SearchAcrossEntitiesInput, } from '../../types.generated'; -import { SearchFilters } from './SearchFilters'; import { SearchCfg } from '../../conf'; import { SearchResultsRecommendations } from './SearchResultsRecommendations'; import { useGetAuthenticatedUser } from '../useGetAuthenticatedUser'; @@ -23,6 +22,8 @@ import { isListSubset } from '../entity/shared/utils'; import TabToolbar from '../entity/shared/components/styled/TabToolbar'; import { EntityAndType } from '../entity/shared/types'; import { ErrorSection } from '../shared/error/ErrorSection'; +import { UnionType } from './utils/constants'; +import { SearchFiltersSection } from './SearchFiltersSection'; const SearchBody = styled.div` display: flex; @@ -30,14 +31,6 @@ const SearchBody = styled.div` min-height: calc(100vh - 60px); `; -const FiltersContainer = styled.div` - display: block; - max-width: 260px; - min-width: 260px; - border-right: 1px solid; - border-color: ${(props) => props.theme.styles['border-color-base']}; -`; - const ResultContainer = styled.div` flex: 1; margin-bottom: 20px; @@ -61,25 +54,6 @@ const PaginationInfoContainer = styled.div` align-items: center; `; -const FiltersHeader = styled.div` - font-size: 14px; - font-weight: 600; - - padding-left: 20px; - padding-right: 20px; - padding-bottom: 8px; - - width: 100%; - height: 47px; - line-height: 47px; - border-bottom: 1px solid; - border-color: ${(props) => props.theme.styles['border-color-base']}; 
-`; - -const SearchFilterContainer = styled.div` - padding-top: 10px; -`; - const SearchResultsRecommendationsContainer = styled.div` margin-top: 40px; `; @@ -92,6 +66,7 @@ const StyledTabToolbar = styled(TabToolbar)` const SearchMenuContainer = styled.div``; interface Props { + unionType?: UnionType; query: string; page: number; searchResponse?: { @@ -108,6 +83,7 @@ interface Props { loading: boolean; error: any; onChangeFilters: (filters: Array) => void; + onChangeUnionType: (unionType: UnionType) => void; onChangePage: (page: number) => void; callSearchOnVariables: (variables: { input: SearchAcrossEntitiesInput; @@ -125,6 +101,7 @@ interface Props { } export const SearchResults = ({ + unionType = UnionType.AND, query, page, searchResponse, @@ -132,6 +109,7 @@ export const SearchResults = ({ selectedFilters, loading, error, + onChangeUnionType, onChangeFilters, onChangePage, callSearchOnVariables, @@ -161,17 +139,14 @@ export const SearchResults = ({ {loading && }
- - Filter - - onChangeFilters(newFilters)} - /> - - + <> diff --git a/datahub-web-react/src/app/search/SearchFilter.tsx b/datahub-web-react/src/app/search/SimpleSearchFilter.tsx similarity index 93% rename from datahub-web-react/src/app/search/SearchFilter.tsx rename to datahub-web-react/src/app/search/SimpleSearchFilter.tsx index 25536ab0252bab..93404cd740a82d 100644 --- a/datahub-web-react/src/app/search/SearchFilter.tsx +++ b/datahub-web-react/src/app/search/SimpleSearchFilter.tsx @@ -5,7 +5,7 @@ import * as React from 'react'; import { useState } from 'react'; import styled from 'styled-components'; -import { FacetMetadata } from '../../types.generated'; +import { FacetFilterInput, FacetMetadata } from '../../types.generated'; import { SearchFilterLabel } from './SearchFilterLabel'; import { TRUNCATED_FILTER_LENGTH } from './utils/constants'; @@ -17,10 +17,7 @@ const isGraphDegreeFilter = (field: string) => { type Props = { facet: FacetMetadata; - selectedFilters: Array<{ - field: string; - value: string; - }>; + selectedFilters: Array; onFilterSelect: (selected: boolean, field: string, value: string) => void; defaultDisplayFilters: boolean; }; @@ -57,12 +54,12 @@ const StyledDownOutlined = styled(DownOutlined)` font-size: 10px; `; -export const SearchFilter = ({ facet, selectedFilters, onFilterSelect, defaultDisplayFilters }: Props) => { +export const SimpleSearchFilter = ({ facet, selectedFilters, onFilterSelect, defaultDisplayFilters }: Props) => { const [areFiltersVisible, setAreFiltersVisible] = useState(defaultDisplayFilters); const [expanded, setExpanded] = useState(false); const isFacetSelected = (field, value) => { - return selectedFilters.find((f) => f.field === field && f.value === value) !== undefined; + return selectedFilters.find((f) => f.field === field && f.values.includes(value)) !== undefined; }; // Aggregations filtered for count > 0 or selected = true diff --git a/datahub-web-react/src/app/search/SearchFilters.tsx 
b/datahub-web-react/src/app/search/SimpleSearchFilters.tsx similarity index 70% rename from datahub-web-react/src/app/search/SearchFilters.tsx rename to datahub-web-react/src/app/search/SimpleSearchFilters.tsx index 309533dab4c363..b235da383c551c 100644 --- a/datahub-web-react/src/app/search/SearchFilters.tsx +++ b/datahub-web-react/src/app/search/SimpleSearchFilters.tsx @@ -1,8 +1,8 @@ import * as React from 'react'; import styled from 'styled-components'; import { useEffect, useState } from 'react'; -import { FacetMetadata } from '../../types.generated'; -import { SearchFilter } from './SearchFilter'; +import { FacetFilterInput, FacetMetadata } from '../../types.generated'; +import { SimpleSearchFilter } from './SimpleSearchFilter'; const TOP_FILTERS = ['degree', 'entity', 'tags', 'glossaryTerms', 'domains', 'owners']; @@ -24,26 +24,15 @@ export const SearchFilterWrapper = styled.div` interface Props { facets: Array; - selectedFilters: Array<{ - field: string; - value: string; - }>; - onFilterSelect: ( - newFilters: Array<{ - field: string; - value: string; - }>, - ) => void; + selectedFilters: Array; + onFilterSelect: (newFilters: Array) => void; loading: boolean; } -export const SearchFilters = ({ facets, selectedFilters, onFilterSelect, loading }: Props) => { +export const SimpleSearchFilters = ({ facets, selectedFilters, onFilterSelect, loading }: Props) => { const [cachedProps, setCachedProps] = useState<{ facets: Array; - selectedFilters: Array<{ - field: string; - value: string; - }>; + selectedFilters: Array; }>({ facets, selectedFilters, @@ -58,8 +47,14 @@ export const SearchFilters = ({ facets, selectedFilters, onFilterSelect, loading const onFilterSelectAndSetCache = (selected: boolean, field: string, value: string) => { const newFilters = selected - ? [...selectedFilters, { field, value }] - : selectedFilters.filter((filter) => filter.field !== field || filter.value !== value); + ? 
[...selectedFilters, { field, values: [value] }] + : selectedFilters + .map((filter) => + filter.field === field + ? { ...filter, values: filter.values.filter((val) => val !== value) } + : filter, + ) + .filter((filter) => filter.field !== field || !(filter.values.length === 0)); setCachedProps({ ...cachedProps, selectedFilters: newFilters }); onFilterSelect(newFilters); }; @@ -73,7 +68,7 @@ export const SearchFilters = ({ facets, selectedFilters, onFilterSelect, loading return ( {sortedFacets.map((facet) => ( - ): Array { - return Object.entries( - filters.reduce((acc, filter) => { - acc[filter.field] = [...(acc[filter.field] || []), filter.value]; - return acc; - }, {} as Record), - ).map(([field, values]) => ({ field, value: values.join(',') } as FacetFilterInput)); -} diff --git a/datahub-web-react/src/app/search/utils/filtersToQueryStringParams.ts b/datahub-web-react/src/app/search/utils/filtersToQueryStringParams.ts index 04c80af3b9de3a..6a14a2b664eb9a 100644 --- a/datahub-web-react/src/app/search/utils/filtersToQueryStringParams.ts +++ b/datahub-web-react/src/app/search/utils/filtersToQueryStringParams.ts @@ -1,14 +1,36 @@ -import { FacetFilterInput } from '../../../types.generated'; +import { FacetFilterInput, FilterOperator } from '../../../types.generated'; import { encodeComma } from '../../entity/shared/utils'; -import { FILTER_URL_PREFIX } from './constants'; +import { DEGREE_FILTER, FILTER_URL_PREFIX } from './constants'; + +export const URL_PARAM_SEPARATOR = '___'; + +// In the checkbox-based filter view, usually, selecting two facets ANDs them together. 
+// E.g., if you select the checkbox for tagA and tagB, that means "has tagA AND tagB" +// we need to special case `degree` filter since it is a OR grouping vs the others which are ANDS by default +function reduceFiltersToCombineDegreeFilters(acc: FacetFilterInput[], filter: FacetFilterInput) { + // if we see a `degree` filter and we already have one, combine it with the other degree filter + if (filter.field === DEGREE_FILTER && acc.filter((f) => f.field === DEGREE_FILTER).length > 0) { + // instead of appending this new degree filter, combine it with the previous one and continue + return acc.map((f) => + f.field === DEGREE_FILTER ? { ...f, values: [...f.values, ...filter.values] } : f, + ) as FacetFilterInput[]; + } + return [...acc, filter] as FacetFilterInput[]; +} + +// we need to reformat our list of filters into a dict +function reduceFiltersIntoQueryStringDict(acc, filter, idx) { + acc[ + `${FILTER_URL_PREFIX}${filter.field}${URL_PARAM_SEPARATOR}${String(!!filter.negated)}${URL_PARAM_SEPARATOR}${ + filter.condition || FilterOperator.Equal + }${URL_PARAM_SEPARATOR}${idx}` + ] = [...filter.values.map((value) => encodeComma(value))]; + return acc; +} // transform filters from [{ filter, value }, { filter, value }] to { filter: [value, value ] } that QueryString can parse export default function filtersToQueryStringParams(filters: Array = []) { - return filters.reduce((acc, filter) => { - acc[`${FILTER_URL_PREFIX}${filter.field}`] = [ - ...(acc[`${FILTER_URL_PREFIX}${filter.field}`] || []), - encodeComma(filter.value), - ]; - return acc; - }, {} as Record); + return filters + .reduce(reduceFiltersToCombineDegreeFilters, []) + .reduce(reduceFiltersIntoQueryStringDict, {} as Record); } diff --git a/datahub-web-react/src/app/search/utils/generateOrFilters.ts b/datahub-web-react/src/app/search/utils/generateOrFilters.ts new file mode 100644 index 00000000000000..a798a6ada4b2a8 --- /dev/null +++ b/datahub-web-react/src/app/search/utils/generateOrFilters.ts @@ -0,0 
+1,20 @@ +import { FacetFilterInput, OrFilter } from '../../../types.generated'; +import { UnionType } from './constants'; + +export function generateOrFilters(unionType: UnionType, filters: FacetFilterInput[]): OrFilter[] { + if ((filters?.length || 0) === 0) { + return []; + } + + if (unionType === UnionType.OR) { + return filters.map((filter) => ({ + and: [filter], + })); + } + + return [ + { + and: filters, + }, + ]; +} diff --git a/datahub-web-react/src/app/search/utils/hasAdvancedFilters.ts b/datahub-web-react/src/app/search/utils/hasAdvancedFilters.ts new file mode 100644 index 00000000000000..e1b7c104b974b4 --- /dev/null +++ b/datahub-web-react/src/app/search/utils/hasAdvancedFilters.ts @@ -0,0 +1,12 @@ +import { FacetFilterInput } from '../../../types.generated'; +import { ADVANCED_SEARCH_ONLY_FILTERS, UnionType } from './constants'; + +// utility method that looks at the set of filters and determines if the filters can be represented by simple search +export const hasAdvancedFilters = (filters: FacetFilterInput[], unionType: UnionType) => { + return ( + filters.filter( + (filter) => + ADVANCED_SEARCH_ONLY_FILTERS.indexOf(filter.field) >= 0 || filter.negated || unionType === UnionType.OR, + ).length > 0 + ); +}; diff --git a/datahub-web-react/src/app/search/utils/navigateToSearchUrl.ts b/datahub-web-react/src/app/search/utils/navigateToSearchUrl.ts index 3827283bb353c2..73f797900419cf 100644 --- a/datahub-web-react/src/app/search/utils/navigateToSearchUrl.ts +++ b/datahub-web-react/src/app/search/utils/navigateToSearchUrl.ts @@ -4,12 +4,14 @@ import { RouteComponentProps } from 'react-router-dom'; import filtersToQueryStringParams from './filtersToQueryStringParams'; import { EntityType, FacetFilterInput } from '../../../types.generated'; import { PageRoutes } from '../../../conf/Global'; +import { UnionType } from './constants'; export const navigateToSearchUrl = ({ type: newType, query: newQuery, page: newPage = 1, filters: newFilters, + unionType = 
UnionType.AND, history, }: { type?: EntityType; @@ -17,10 +19,11 @@ export const navigateToSearchUrl = ({ page?: number; filters?: Array; history: RouteComponentProps['history']; + unionType?: UnionType; }) => { const constructedFilters = newFilters || []; if (newType) { - constructedFilters.push({ field: 'entity', value: newType }); + constructedFilters.push({ field: 'entity', values: [newType] }); } const search = QueryString.stringify( @@ -28,6 +31,7 @@ export const navigateToSearchUrl = ({ ...filtersToQueryStringParams(constructedFilters), query: encodeURIComponent(newQuery || ''), page: newPage, + unionType, }, { arrayFormat: 'comma' }, ); @@ -37,33 +41,3 @@ export const navigateToSearchUrl = ({ search, }); }; - -export const navigateToSearchLineageUrl = ({ - entityUrl, - query: newQuery, - page: newPage = 1, - filters: newFilters, - history, -}: { - entityUrl: string; - query?: string; - page?: number; - filters?: Array; - history: RouteComponentProps['history']; -}) => { - const constructedFilters = newFilters || []; - - const search = QueryString.stringify( - { - ...filtersToQueryStringParams(constructedFilters), - query: encodeURIComponent(newQuery || ''), - page: newPage, - }, - { arrayFormat: 'comma' }, - ); - - history.push({ - pathname: entityUrl, - search, - }); -}; diff --git a/datahub-web-react/src/app/search/utils/useFilters.ts b/datahub-web-react/src/app/search/utils/useFilters.ts index cab00fa8258c35..fd3f9e48b0ac48 100644 --- a/datahub-web-react/src/app/search/utils/useFilters.ts +++ b/datahub-web-react/src/app/search/utils/useFilters.ts @@ -2,27 +2,37 @@ import { useMemo } from 'react'; import * as QueryString from 'query-string'; import { FILTER_URL_PREFIX } from './constants'; -import { FacetFilterInput } from '../../../types.generated'; +import { FacetFilterInput, FilterOperator } from '../../../types.generated'; import { decodeComma } from '../../entity/shared/utils'; +import { URL_PARAM_SEPARATOR } from './filtersToQueryStringParams'; 
export default function useFilters(params: QueryString.ParsedQuery): Array { - return useMemo( - () => - // get all query params + return useMemo(() => { + return ( Object.entries(params) // select only the ones with the `filter_` prefix .filter(([key, _]) => key.indexOf(FILTER_URL_PREFIX) >= 0) // transform the filters currently in format [key, [value1, value2]] to [{key: key, value: value1}, { key: key, value: value2}] format that graphql expects - .flatMap(([key, value]) => { + .map(([key, value]) => { // remove the `filter_` prefix - const field = key.replace(FILTER_URL_PREFIX, ''); - if (!value) return []; + const fieldIndex = key.replace(FILTER_URL_PREFIX, ''); + const fieldParts = fieldIndex.split(URL_PARAM_SEPARATOR); + const field = fieldParts[0]; + const negated = fieldParts[1] === 'true'; + const condition = fieldParts[2] || FilterOperator.Equal; + if (!value) return null; if (Array.isArray(value)) { - return value.map((distinctValue) => ({ field, value: decodeComma(distinctValue) })); + return { + field, + condition, + negated, + values: value.map((distinctValue) => decodeComma(distinctValue)), + }; } - return [{ field, value: decodeComma(value) }]; - }), - [params], - ); + return { field, condition, values: [decodeComma(value)], negated }; + }) + .filter((val) => !!val) as Array + ); + }, [params]); } diff --git a/datahub-web-react/src/app/settings/AccessTokens.tsx b/datahub-web-react/src/app/settings/AccessTokens.tsx index f058355c533ea1..273d3d2fb5ffa6 100644 --- a/datahub-web-react/src/app/settings/AccessTokens.tsx +++ b/datahub-web-react/src/app/settings/AccessTokens.tsx @@ -93,7 +93,7 @@ export const AccessTokens = () => { const filters: Array = [ { field: 'ownerUrn', - value: currentUserUrn, + values: [currentUserUrn], }, ]; diff --git a/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx b/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx index 5ac05c1da75690..f88a27fd0c0799 100644 --- 
a/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx +++ b/datahub-web-react/src/app/shared/tags/AddTagsTermsModal.tsx @@ -136,7 +136,7 @@ export default function EditTagTermsModal({ entity.type === EntityType.Tag ? (entity as Tag).name : entityRegistry.getDisplayName(entity.type, entity); const tagOrTermComponent = ; return ( - + {tagOrTermComponent} ); @@ -431,6 +431,7 @@ export default function EditTagTermsModal({ > setIsFocusedOnInput(false)}> { + ref.current = value; + }); + return ref.current; +} diff --git a/datahub-web-react/src/app/useGetAuthenticatedUser.tsx b/datahub-web-react/src/app/useGetAuthenticatedUser.tsx index 6214a33b37040a..aaacd98060f860 100644 --- a/datahub-web-react/src/app/useGetAuthenticatedUser.tsx +++ b/datahub-web-react/src/app/useGetAuthenticatedUser.tsx @@ -7,10 +7,7 @@ import { useGetMeQuery } from '../graphql/me.generated'; */ export function useGetAuthenticatedUser(skip?: boolean) { const userUrn = Cookies.get(CLIENT_AUTH_COOKIE); - if (!userUrn) { - throw new Error('Could not find logged in user.'); - } - const { data, error } = useGetMeQuery({ skip, fetchPolicy: 'cache-and-network' }); + const { data, error } = useGetMeQuery({ skip: skip || !userUrn, fetchPolicy: 'cache-and-network' }); if (error) { console.error(`Could not fetch logged in user from cache. 
+ ${error.message}`); } diff --git a/datahub-web-react/src/graphql/analytics.graphql b/datahub-web-react/src/graphql/analytics.graphql index be16a8014f8da9..0ea4235f588128 100644 --- a/datahub-web-react/src/graphql/analytics.graphql +++ b/datahub-web-react/src/graphql/analytics.graphql @@ -61,7 +61,7 @@ fragment analyticsChart on AnalyticsChart { query filters { field - value + values } } entityProfileParams { diff --git a/datahub-web-react/src/graphql/lineage.graphql b/datahub-web-react/src/graphql/lineage.graphql index 35f0b8a4869240..d1d2f10af7ef8c 100644 --- a/datahub-web-react/src/graphql/lineage.graphql +++ b/datahub-web-react/src/graphql/lineage.graphql @@ -172,6 +172,16 @@ fragment lineageNodeProperties on EntityWithRelationships { status { removed } + fineGrainedLineages { + upstreams { + urn + path + } + downstreams { + urn + path + } + } } ... on MLModelGroup { urn @@ -244,6 +254,11 @@ fragment fullLineageResults on EntityLineageResult { type entity { ...lineageFields + ... on Dataset { + schemaMetadata(version: 0)@include(if: $showColumns) { + ...schemaMetadataFields + } + } } } } @@ -267,12 +282,15 @@ fragment partialLineageResults on EntityLineageResult { total } -query getEntityLineage($urn: String!, $separateSiblings: Boolean) { +query getEntityLineage($urn: String!, $separateSiblings: Boolean, $showColumns: Boolean!) { entity(urn: $urn) { urn type ...lineageNodeProperties ... on Dataset { + schemaMetadata(version: 0)@include(if: $showColumns) { + ...schemaMetadataFields + } siblings { isPrimary siblings { diff --git a/datahub-web-react/src/graphql/mutations.graphql b/datahub-web-react/src/graphql/mutations.graphql index 105289fe9bb050..7d1fa0c7cd9446 100644 --- a/datahub-web-react/src/graphql/mutations.graphql +++ b/datahub-web-react/src/graphql/mutations.graphql @@ -114,4 +114,8 @@ mutation createInviteToken($input: CreateInviteTokenInput!) { mutation acceptRole($input: AcceptRoleInput!) 
{ acceptRole(input: $input) +} + +mutation createPost($input: CreatePostInput!) { + createPost(input: $input) } \ No newline at end of file diff --git a/datahub-web-react/src/graphql/post.graphql b/datahub-web-react/src/graphql/post.graphql new file mode 100644 index 00000000000000..c19f38fc7751c1 --- /dev/null +++ b/datahub-web-react/src/graphql/post.graphql @@ -0,0 +1,22 @@ +query listPosts($input: ListPostsInput!) { + listPosts(input: $input) { + start + count + total + posts { + urn + type + postType + content { + contentType + title + description + link + media { + type + location + } + } + } + } +} diff --git a/datahub-web-react/src/graphql/recommendations.graphql b/datahub-web-react/src/graphql/recommendations.graphql index ff42a64eef6d73..5103ad6125b353 100644 --- a/datahub-web-react/src/graphql/recommendations.graphql +++ b/datahub-web-react/src/graphql/recommendations.graphql @@ -15,7 +15,7 @@ query listRecommendations($input: ListRecommendationsInput!) { query filters { field - value + values } } entityProfileParams { diff --git a/datahub-web-react/src/graphql/search.graphql b/datahub-web-react/src/graphql/search.graphql index 4ea340d64f2d29..9b585351fe9f29 100644 --- a/datahub-web-react/src/graphql/search.graphql +++ b/datahub-web-react/src/graphql/search.graphql @@ -378,9 +378,6 @@ fragment searchResultFields on Entity { } } } - inputFields { - ...inputFieldsFields - } subTypes { typeNames } @@ -446,9 +443,6 @@ fragment searchResultFields on Entity { } } } - inputFields { - ...inputFieldsFields - } } ... 
on DataFlow { flowId diff --git a/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx b/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx index 8cc6c36a0824e1..65b563abbac559 100644 --- a/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx +++ b/datahub-web-react/src/utils/test-utils/TestPageContainer.tsx @@ -19,6 +19,7 @@ import { MLModelEntity } from '../../app/entity/mlModel/MLModelEntity'; import { MLModelGroupEntity } from '../../app/entity/mlModelGroup/MLModelGroupEntity'; import { ChartEntity } from '../../app/entity/chart/ChartEntity'; import { DashboardEntity } from '../../app/entity/dashboard/DashboardEntity'; +import { LineageExplorerContext } from '../../app/lineage/utils/LineageExplorerContext'; type Props = { children: React.ReactNode; @@ -53,7 +54,27 @@ export default ({ children, initialEntries }: Props) => { return ( - {children} + + {}, + highlightedEdges: [], + setHighlightedEdges: () => {}, + visibleColumnsByUrn: {}, + setVisibleColumnsByUrn: () => {}, + columnsByUrn: {}, + setColumnsByUrn: () => {}, + }} + > + {children} + + ); diff --git a/docker/datahub-gms/env/docker-without-neo4j.env b/docker/datahub-gms/env/docker-without-neo4j.env index 45b8e8c5eda54b..6a085266d5c88a 100644 --- a/docker/datahub-gms/env/docker-without-neo4j.env +++ b/docker/datahub-gms/env/docker-without-neo4j.env @@ -16,7 +16,6 @@ MAE_CONSUMER_ENABLED=true MCE_CONSUMER_ENABLED=true PE_CONSUMER_ENABLED=true UI_INGESTION_ENABLED=true -UI_INGESTION_DEFAULT_CLI_VERSION=0.8.42 ENTITY_SERVICE_ENABLE_RETENTION=true # Uncomment to disable persistence of client-side analytics events @@ -46,4 +45,4 @@ ENTITY_SERVICE_ENABLE_RETENTION=true # Uncomment to run a one-time upgrade to migrate legacy default browse path format to latest format # More details can be found at https://datahubproject.io/docs/advanced/browse-paths-upgrade -# UPGRADE_DEFAULT_BROWSE_PATHS_ENABLED=true \ No newline at end of file +# UPGRADE_DEFAULT_BROWSE_PATHS_ENABLED=true diff --git 
a/docker/datahub-gms/env/docker.env b/docker/datahub-gms/env/docker.env index 1b859aa59b144a..7c0297a3cd8ba9 100644 --- a/docker/datahub-gms/env/docker.env +++ b/docker/datahub-gms/env/docker.env @@ -20,7 +20,6 @@ MAE_CONSUMER_ENABLED=true MCE_CONSUMER_ENABLED=true PE_CONSUMER_ENABLED=true UI_INGESTION_ENABLED=true -UI_INGESTION_DEFAULT_CLI_VERSION=0.8.42 # Uncomment to enable Metadata Service Authentication # METADATA_SERVICE_AUTH_ENABLED=true diff --git a/docker/datahub-mae-consumer/Dockerfile b/docker/datahub-mae-consumer/Dockerfile index c86738e9ff3162..96a34d22a7e638 100644 --- a/docker/datahub-mae-consumer/Dockerfile +++ b/docker/datahub-mae-consumer/Dockerfile @@ -14,7 +14,7 @@ RUN apk --no-cache --update-cache --available upgrade \ else \ echo >&2 "Unsupported architecture $(arch)" ; exit 1; \ fi \ - && apk --no-cache add tar curl bash \ + && apk --no-cache add tar curl bash coreutils \ && apk --no-cache add openjdk11-jre --repository=http://dl-cdn.alpinelinux.org/alpine/edge/community \ && wget --no-verbose https://github.com/open-telemetry/opentelemetry-java-instrumentation/releases/download/v1.4.1/opentelemetry-javaagent-all.jar \ && wget --no-verbose https://repo1.maven.org/maven2/io/prometheus/jmx/jmx_prometheus_javaagent/0.16.1/jmx_prometheus_javaagent-0.16.1.jar -O jmx_prometheus_javaagent.jar \ diff --git a/docker/datahub-mae-consumer/start.sh b/docker/datahub-mae-consumer/start.sh index e8b9d182ca24c0..0a822effe713bc 100755 --- a/docker/datahub-mae-consumer/start.sh +++ b/docker/datahub-mae-consumer/start.sh @@ -1,61 +1,46 @@ #!/bin/bash +set -euo pipefail # Add default URI (http) scheme if needed -if ! echo $NEO4J_HOST | grep -q "://" ; then - NEO4J_HOST="http://$NEO4J_HOST" +if [[ -n ${NEO4J_HOST:-} ]] && [[ ${NEO4J_HOST} != *"://"* ]]; then + NEO4J_HOST="http://$NEO4J_HOST" fi -if [[ ! 
-z $ELASTICSEARCH_USERNAME ]] && [[ -z $ELASTICSEARCH_AUTH_HEADER ]]; then +if [[ -n ${ELASTICSEARCH_USERNAME:-} ]] && [[ -z ${ELASTICSEARCH_AUTH_HEADER:-} ]]; then AUTH_TOKEN=$(echo -ne "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" | base64 --wrap 0) ELASTICSEARCH_AUTH_HEADER="Authorization:Basic $AUTH_TOKEN" fi # Add default header if needed -if [[ -z $ELASTICSEARCH_AUTH_HEADER ]]; then - ELASTICSEARCH_AUTH_HEADER="Accept: */*" -fi +: "${ELASTICSEARCH_AUTH_HEADER="Accept: */*"}" -if [[ $ELASTICSEARCH_USE_SSL == true ]]; then +if [[ ${ELASTICSEARCH_USE_SSL:-false} == true ]]; then ELASTICSEARCH_PROTOCOL=https else ELASTICSEARCH_PROTOCOL=http fi -WAIT_FOR_KAFKA="" -if [[ $SKIP_KAFKA_CHECK != true ]]; then - WAIT_FOR_KAFKA=" -wait tcp://$(echo $KAFKA_BOOTSTRAP_SERVER | sed 's/,/ -wait tcp:\/\//g') " +dockerize_args=("-timeout" "240s") +if [[ ${SKIP_KAFKA_CHECK:-false} != true ]]; then + IFS=',' read -ra KAFKAS <<< "$KAFKA_BOOTSTRAP_SERVER" + for i in "${KAFKAS[@]}"; do + dockerize_args+=("-wait" "tcp://$i") + done fi - -WAIT_FOR_ELASTICSEARCH="" -if [[ $SKIP_ELASTICSEARCH_CHECK != true ]]; then - WAIT_FOR_ELASTICSEARCH=" -wait $ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT -wait-http-header \"$ELASTICSEARCH_AUTH_HEADER\"" +if [[ ${SKIP_ELASTICSEARCH_CHECK:-false} != true ]]; then + dockerize_args+=("-wait" "$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT" "-wait-http-header" "$ELASTICSEARCH_AUTH_HEADER") fi - -WAIT_FOR_NEO4J="" -if [[ $GRAPH_SERVICE_IMPL != elasticsearch ]] && [[ $SKIP_NEO4J_CHECK != true ]]; then - WAIT_FOR_NEO4J=" -wait $NEO4J_HOST " +if [[ ${GRAPH_SERVICE_IMPL:-} != elasticsearch ]] && [[ ${SKIP_NEO4J_CHECK:-false} != true ]]; then + dockerize_args+=("-wait" "$NEO4J_HOST") fi -OTEL_AGENT="" -if [[ $ENABLE_OTEL == true ]]; then - OTEL_AGENT="-javaagent:opentelemetry-javaagent-all.jar " +JAVA_TOOL_OPTIONS="${JDK_JAVA_OPTIONS:-}${JAVA_OPTS:+ JAVA_OPTS}${JMX_OPTS:+ JMX_OPTS}" +if [[ ${ENABLE_OTEL:-false} == 
true ]]; then + JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -javaagent:opentelemetry-javaagent-all.jar" fi - -PROMETHEUS_AGENT="" -if [[ $ENABLE_PROMETHEUS == true ]]; then - PROMETHEUS_AGENT="-javaagent:jmx_prometheus_javaagent.jar=4318:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml " +if [[ ${ENABLE_PROMETHEUS:-false} == true ]]; then + JAVA_TOOL_OPTIONS="$JAVA_TOOL_OPTIONS -javaagent:jmx_prometheus_javaagent.jar=4318:/datahub/datahub-mae-consumer/scripts/prometheus-config.yaml" fi -COMMON=" - $WAIT_FOR_KAFKA \ - $WAIT_FOR_NEO4J \ - -timeout 240s \ - java $JAVA_OPTS $JMX_OPTS $OTEL_AGENT $PROMETHEUS_AGENT -jar /datahub/datahub-mae-consumer/bin/mae-consumer-job.jar -" -if [[ $SKIP_ELASTICSEARCH_CHECK != true ]]; then - exec dockerize \ - -wait $ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT -wait-http-header "$ELASTICSEARCH_AUTH_HEADER" \ - $COMMON -else - exec dockerize $COMMON -fi +export JAVA_TOOL_OPTIONS +exec dockerize "${dockerize_args[@]}" java -jar /datahub/datahub-mae-consumer/bin/mae-consumer-job.jar diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml index 58ee367126c506..7592fd5e3cd78a 100644 --- a/docker/docker-compose.yml +++ b/docker/docker-compose.yml @@ -69,7 +69,7 @@ services: retries: 4 neo4j: - image: neo4j:4.0.6 + image: neo4j:4.4.9-community env_file: neo4j/env/docker.env hostname: neo4j container_name: neo4j diff --git a/docker/elasticsearch-setup/create-indices.sh b/docker/elasticsearch-setup/create-indices.sh old mode 100755 new mode 100644 index 23b5282d09fdc9..62dd80afc584e5 --- a/docker/elasticsearch-setup/create-indices.sh +++ b/docker/elasticsearch-setup/create-indices.sh @@ -4,96 +4,139 @@ set -e : ${DATAHUB_ANALYTICS_ENABLED:=true} : ${USE_AWS_ELASTICSEARCH:=false} +: ${ELASTICSEARCH_INSECURE:=false} +# protocol: http or https? 
if [[ $ELASTICSEARCH_USE_SSL == true ]]; then ELASTICSEARCH_PROTOCOL=https else ELASTICSEARCH_PROTOCOL=http fi -echo -e "Going to use protocol: $ELASTICSEARCH_PROTOCOL" +echo -e "going to use protocol: $ELASTICSEARCH_PROTOCOL" -if [[ ! -z $ELASTICSEARCH_USERNAME ]] && [[ -z $ELASTICSEARCH_AUTH_HEADER ]]; then - AUTH_TOKEN=$(echo -ne "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" | base64 --wrap 0) - ELASTICSEARCH_AUTH_HEADER="Authorization:Basic $AUTH_TOKEN" -fi +# Elasticsearch URL to be suffixed with a resource address +ELASTICSEARCH_URL="$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT" -# Add default header if needed +# set auth header if none is given if [[ -z $ELASTICSEARCH_AUTH_HEADER ]]; then - echo -e "Going to use default elastic headers" - ELASTICSEARCH_AUTH_HEADER="Accept: */*" + if [[ ! -z $ELASTICSEARCH_USERNAME ]]; then + # no auth header given, but username is defined -> use it to create the auth header + AUTH_TOKEN=$(echo -ne "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" | base64 --wrap 0) + ELASTICSEARCH_AUTH_HEADER="Authorization:Basic $AUTH_TOKEN" + echo -e "going to use elastic headers based on username and password" + else + # no auth header or username given -> use default auth header + ELASTICSEARCH_AUTH_HEADER="Accept: */*" + echo -e "going to use default elastic headers" + fi fi -if [[ $ELASTICSEARCH_INSECURE ]]; then - ELASTICSEARCH_INSECURE="-k " +# will be using this for all curl communication with Elasticsearch: +CURL_ARGS=( + --silent + --header "$ELASTICSEARCH_AUTH_HEADER" +) +# ... 
also optionally use --insecure +if [[ $ELASTICSEARCH_INSECURE == true ]]; then + CURL_ARGS+=(--insecure) fi -function create_datahub_usage_event_datastream() { - if [[ -z "$INDEX_PREFIX" ]]; then - PREFIX='' - else - PREFIX="${INDEX_PREFIX}_" - fi - echo -e "Create datahub_usage_event if needed against Elasticsearch at $ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT" - echo -e "Going to use index prefix:$PREFIX:" - POLICY_RESPONSE_CODE=$(curl -o /dev/null -s -w "%{http_code}" --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_ilm/policy/${PREFIX}datahub_usage_event_policy") - echo -e "Policy GET response code is $POLICY_RESPONSE_CODE" - POLICY_NAME="${PREFIX}datahub_usage_event_policy" - if [ $POLICY_RESPONSE_CODE -eq 404 ]; then - echo -e "\ncreating $POLICY_NAME" - sed -e "s/PREFIX/${PREFIX}/g" /index/usage-event/policy.json | tee -a /tmp/policy.json - curl -s -XPUT --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_ilm/policy/$POLICY_NAME" --header "Content-Type: application/json" --data "@/tmp/policy.json" - elif [ $POLICY_RESPONSE_CODE -eq 200 ]; then - echo -e "\n${POLICY_NAME} exists" - elif [ $POLICY_RESPONSE_CODE -eq 403 ]; then - echo -e "Forbidden so exiting" - exit 1 - else - echo -e "Got response code $POLICY_RESPONSE_CODE while creating policy so exiting." 
- exit 1 - fi - - TEMPLATE_RESPONSE_CODE=$(curl -o /dev/null -s -w "%{http_code}" --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_index_template/${PREFIX}datahub_usage_event_index_template") - echo -e "Template GET response code is $TEMPLATE_RESPONSE_CODE" - TEMPLATE_NAME="${PREFIX}datahub_usage_event_index_template" - if [ $TEMPLATE_RESPONSE_CODE -eq 404 ]; then - echo -e "\ncreating $TEMPLATE_NAME" - sed -e "s/PREFIX/${PREFIX}/g" /index/usage-event/index_template.json | tee -a /tmp/index_template.json - curl -s -XPUT --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_index_template/$TEMPLATE_NAME" --header "Content-Type: application/json" --data "@/tmp/index_template.json" - elif [ $TEMPLATE_RESPONSE_CODE -eq 200 ]; then - echo -e "\n$TEMPLATE_NAME exists" - elif [ $TEMPLATE_RESPONSE_CODE -eq 403 ]; then - echo -e "Forbidden so exiting" +# index prefix used throughout the script +if [[ -z "$INDEX_PREFIX" ]]; then + PREFIX='' + echo -e "not using any prefix" +else + PREFIX="${INDEX_PREFIX}_" + echo -e "going to use prefix: '$PREFIX'" +fi + +# path where index definitions are stored +INDEX_DEFINITIONS_ROOT=/index/usage-event + + +# check Elasticsearch for given index/resource (first argument) +# if it doesn't exist (http code 404), use the given file (second argument) to create it +function create_if_not_exists { + RESOURCE_ADDRESS="$1" + RESOURCE_DEFINITION_NAME="$2" + + # query ES to see if the resource already exists + RESOURCE_STATUS=$(curl "${CURL_ARGS[@]}" -o /dev/null -w "%{http_code}\n" "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS") + echo -e "\n>>> GET $RESOURCE_ADDRESS response code is $RESOURCE_STATUS" + + if [ $RESOURCE_STATUS -eq 200 ]; then + # resource already exists -> nothing to do + echo -e ">>> $RESOURCE_ADDRESS already exists ✓" + + elif [ $RESOURCE_STATUS -eq 404 ]; then + # resource doesn't 
exist -> need to create it + echo -e ">>> creating $RESOURCE_ADDRESS because it doesn't exist ..." + # use the file at given path as definition, but first replace all occurences of `PREFIX` + # placeholder within the file with the actual prefix value + TMP_SOURCE_PATH="/tmp/$RESOURCE_DEFINITION_NAME" + sed -e "s/PREFIX/$PREFIX/g" "$INDEX_DEFINITIONS_ROOT/$RESOURCE_DEFINITION_NAME" | tee -a "$TMP_SOURCE_PATH" + curl "${CURL_ARGS[@]}" -XPUT "$ELASTICSEARCH_URL/$RESOURCE_ADDRESS" -H 'Content-Type: application/json' --data "@$TMP_SOURCE_PATH" + + elif [ $RESOURCE_STATUS -eq 403 ]; then + # probably authorization fail + echo -e ">>> forbidden access to $RESOURCE_ADDRESS ! -> exiting" exit 1 + else - echo -e "Got response code $TEMPLATE_RESPONSE_CODE while creating template so exiting." + # when `USE_AWS_ELASTICSEARCH` was forgotten to be set to `true` when running against AWS ES OSS, + # this script will use wrong paths (e.g. `_ilm/policy/` instead of AWS-compatible `_opendistro/_ism/policies/`) + # and the ES endpoint will return `401 Unauthorized` or `405 Method Not Allowed` + # let's use this as chance to point that wrong config might be used! + if [ $RESOURCE_STATUS -eq 401 ] || [ $RESOURCE_STATUS -eq 405 ]; then + if [[ $USE_AWS_ELASTICSEARCH == false ]] && [[ $ELASTICSEARCH_URL == *"amazonaws"* ]]; then + echo "... looks like AWS OpenSearch is used; please set USE_AWS_ELASTICSEARCH env value to true" + fi + fi + + echo -e ">>> failed to GET $RESOURCE_ADDRESS ! -> exiting" exit 1 fi } +# create indices for ES (non-AWS) +function create_datahub_usage_event_datastream() { + # non-AWS env requires creation of two resources for Datahub usage events: + # 1. ILM policy + create_if_not_exists "_ilm/policy/${PREFIX}datahub_usage_event_policy" policy.json + # 2. 
index template + create_if_not_exists "_index_template/${PREFIX}datahub_usage_event_index_template" index_template.json +} + +# create indices for ES OSS (AWS) function create_datahub_usage_event_aws_elasticsearch() { - if [[ -z "$INDEX_PREFIX" ]]; then - PREFIX='' - else - PREFIX="${INDEX_PREFIX}_" - fi + # AWS env requires creation of three resources for Datahub usage events: + # 1. ISM policy + create_if_not_exists "_opendistro/_ism/policies/${PREFIX}datahub_usage_event_policy" aws_es_ism_policy.json - if [ $(curl -o /dev/null -s -w "%{http_code}" --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_opendistro/_ism/policies/${PREFIX}datahub_usage_event_policy") -eq 404 ] - then - echo -e "\ncreating datahub_usage_event_policy" - sed -e "s/PREFIX/${PREFIX}/g" /index/usage-event/aws_es_ism_policy.json | tee -a /tmp/aws_es_ism_policy.json - curl -XPUT --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_opendistro/_ism/policies/${PREFIX}datahub_usage_event_policy" -H 'Content-Type: application/json' --data @/tmp/aws_es_ism_policy.json - else - echo -e "\ndatahub_usage_event_policy exists" - fi - if [ $(curl -o /dev/null -s -w "%{http_code}" --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_template/${PREFIX}datahub_usage_event_index_template") -eq 404 ] - then - echo -e "\ncreating datahub_usage_event_index_template" - sed -e "s/PREFIX/${PREFIX}/g" /index/usage-event/aws_es_index_template.json | tee -a /tmp/aws_es_index_template.json - curl -XPUT --header "$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/_template/${PREFIX}datahub_usage_event_index_template" -H 'Content-Type: application/json' --data @/tmp/aws_es_index_template.json - curl -XPUT --header 
"$ELASTICSEARCH_AUTH_HEADER" "${ELASTICSEARCH_INSECURE}$ELASTICSEARCH_PROTOCOL://$ELASTICSEARCH_HOST:$ELASTICSEARCH_PORT/${PREFIX}datahub_usage_event-000001" -H 'Content-Type: application/json' --data "{\"aliases\":{\"${PREFIX}datahub_usage_event\":{\"is_write_index\":true}}}" - else - echo -e "\ndatahub_usage_event_index_template exists" + # 2. index template + create_if_not_exists "_template/${PREFIX}datahub_usage_event_index_template" aws_es_index_template.json + + # 3. event index datahub_usage_event-000001 + # (note that AWS *rollover* indices need to use `^.*-\d+$` naming pattern) + # -> https://aws.amazon.com/premiumsupport/knowledge-center/opensearch-failed-rollover-index/ + INDEX_SUFFIX="000001" + # ... but first check whether `datahub_usage_event` wasn't already autocreated by GMS before `datahub_usage_event-000001` + # (as is common case when this script was initially run without properly setting `USE_AWS_ELASTICSEARCH` to `true`) + # -> https://github.com/datahub-project/datahub/issues/5376 + USAGE_EVENT_STATUS=$(curl "${CURL_ARGS[@]}" -o /dev/null -w "%{http_code}\n" "$ELASTICSEARCH_URL/${PREFIX}datahub_usage_event") + if [ $USAGE_EVENT_STATUS -eq 200 ]; then + USAGE_EVENT_DEFINITION=$(curl "${CURL_ARGS[@]}" "$ELASTICSEARCH_URL/${PREFIX}datahub_usage_event") + # the definition is expected to contain "datahub_usage_event-000001" string + if [[ $USAGE_EVENT_DEFINITION != *"datahub_usage_event-$INDEX_SUFFIX"* ]]; then + # ... if it doesn't, we need to drop it + echo -e "\n>>> deleting invalid datahub_usage_event ..." + curl "${CURL_ARGS[@]}" -XDELETE "$ELASTICSEARCH_URL/${PREFIX}datahub_usage_event" + # ... and then recreate it below + fi fi + + # ... 
now we are safe to create the index + create_if_not_exists "${PREFIX}datahub_usage_event-$INDEX_SUFFIX" aws_es_index.json } if [[ $DATAHUB_ANALYTICS_ENABLED == true ]]; then @@ -115,4 +158,4 @@ else elif [ $DATAHUB_USAGE_EVENT_INDEX_RESPONSE_CODE -eq 403 ]; then echo -e "Forbidden so exiting" fi -fi +fi \ No newline at end of file diff --git a/docker/kafka-setup/kafka-setup.sh b/docker/kafka-setup/kafka-setup.sh index d8cc4a251cb151..e6dcf6e199febd 100755 --- a/docker/kafka-setup/kafka-setup.sh +++ b/docker/kafka-setup/kafka-setup.sh @@ -1,5 +1,5 @@ #!/bin/bash -: ${PARTITIONS:=6} +: ${PARTITIONS:=1} : ${REPLICATION_FACTOR:=1} : ${KAFKA_PROPERTIES_SECURITY_PROTOCOL:=PLAINTEXT} diff --git a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml index 05ca6576787b67..b007b59d52841c 100644 --- a/docker/quickstart/docker-compose-without-neo4j.quickstart.yml +++ b/docker/quickstart/docker-compose-without-neo4j.quickstart.yml @@ -82,7 +82,6 @@ services: - MCE_CONSUMER_ENABLED=true - PE_CONSUMER_ENABLED=true - UI_INGESTION_ENABLED=true - - UI_INGESTION_DEFAULT_CLI_VERSION=0.8.42 - ENTITY_SERVICE_ENABLE_RETENTION=true hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} diff --git a/docker/quickstart/docker-compose.quickstart.yml b/docker/quickstart/docker-compose.quickstart.yml index dc35e0c935c2ff..6c0772206a2be7 100644 --- a/docker/quickstart/docker-compose.quickstart.yml +++ b/docker/quickstart/docker-compose.quickstart.yml @@ -89,7 +89,6 @@ services: - MCE_CONSUMER_ENABLED=true - PE_CONSUMER_ENABLED=true - UI_INGESTION_ENABLED=true - - UI_INGESTION_DEFAULT_CLI_VERSION=0.8.42 hostname: datahub-gms image: ${DATAHUB_GMS_IMAGE:-linkedin/datahub-gms}:${DATAHUB_VERSION:-head} ports: @@ -171,7 +170,7 @@ services: - NEO4J_dbms_default__database=graph.db - NEO4J_dbms_allow__upgrade=true hostname: neo4j - image: neo4j:4.0.6 + image: neo4j:4.4.9-community ports: - 
${DATAHUB_MAPPED_NEO4J_HTTP_PORT:-7474}:7474 - ${DATAHUB_MAPPED_NEO4J_BOLT_PORT:-7687}:7687 diff --git a/docs-website/src/pages/docs/_components/QuickLinkCards/index.js b/docs-website/src/pages/docs/_components/QuickLinkCards/index.js index 5d7d22198006c3..ca450cd291fc72 100644 --- a/docs-website/src/pages/docs/_components/QuickLinkCards/index.js +++ b/docs-website/src/pages/docs/_components/QuickLinkCards/index.js @@ -20,7 +20,7 @@ const quickLinkContent = [ title: "Get Started", icon: , description: "Details on how to get DataHub up and running", - to: "/docs/wip/get-started", + to: "/docs/quickstart", }, { title: "Ingest Metadata", diff --git a/docs/authentication/guides/sso/configure-oidc-react-okta.md b/docs/authentication/guides/sso/configure-oidc-react-okta.md index 6e7119e8368cbb..3766d7dbc20caa 100644 --- a/docs/authentication/guides/sso/configure-oidc-react-okta.md +++ b/docs/authentication/guides/sso/configure-oidc-react-okta.md @@ -84,7 +84,7 @@ AUTH_OIDC_SCOPE="openid profile email groups" Replacing the placeholders above with the client id & client secret received from Okta in Step 2. -> **Pro Tip!** You can easily enable Okta to return the groups that a user is associated with, which will be provisioned in DataHub, along with the user logging in, +> **Pro Tip!** You can easily enable Okta to return the groups that a user is associated with, which will be provisioned in DataHub, along with the user logging in. This can be enabled by setting the `AUTH_OIDC_EXTRACT_GROUPS_ENABLED` flag to `true`. > if they do not already exist in DataHub. You can enable your Okta application to return a 'groups' claim from the Okta Console at Applications > Your Application -> Sign On -> OpenID Connect ID Token Settings (Requires an edit). > > By default, we assume that the groups will appear in a claim named "groups". This can be customized using the `AUTH_OIDC_GROUPS_CLAIM` container configuration. 
diff --git a/docs/how/search.md b/docs/how/search.md index 6cef5cf8893118..4617929e02572a 100644 --- a/docs/how/search.md +++ b/docs/how/search.md @@ -1,46 +1,184 @@ -# Search Guide +import FeatureAvailability from '@site/src/components/FeatureAvailability'; -## Introduction +# About DataHub Search -The search bar is one of the means of finding data in Datahub. In this document, we discuss more effective ways of finding information beyond doing a standard keyword search. This is because keyword searches can return results from almost any part of an entity. + -### Search in Specific Fields + + + + +The **search bar** is an important mechanism for discovering data assets in DataHub. From the search bar, you can find Datasets, Columns, Dashboards, Charts, Data Pipelines, and more. Simply type in a term and press 'enter'. + +

+ +

+ +**Advanced queries** and the **filter sidebar** helps fine tuning queries. For programmatic users Datahub provides a **GraphQL API** as well. + +## Search Setup, Prerequisites, and Permissions + +Search is available for all users. Although Search works out of the box, the more relevant data you ingest, the better the results are. + +## Using Search + +Searching is as easy as typing in relevant business terms and pressing 'enter' to view matching data assets. + +By default, search terms will match against different aspects of a data assets. This includes asset names, descriptions, tags, terms, owners, and even specific attributes like the names of columns in a table. + + +### Filters + +The filters sidebar sits on the left hand side of search results, and lets users find assets by drilling down. You can quickly filter by Data Platform (e.g. Snowflake), Tags, Glossary Terms, Domain, Owners, and more with a single click. + +

+ +

+ +### Advanced Filters + +Using the Advanced Filter view, you can apply more complex filters. To get there, click 'Advanced' in the top right of the filter panel: + +

+ +

+ +#### Adding an Advanced Filter + +Currently, Advanced Filters support filtering by Column Name, Container, Domain, Description (entity or column level), Tag (entity or column level), Glossary Term (entity or column level), Owner, Entity Type, Subtype, Environment and soft-deleted status. + +To add a new filter, click the add filter menu, choose a filter type, and then fill in the values you want to filter by. + +

+ +

+ +#### Matching Any Advanced Filter + +By default, all filters must be matched in order for a result to appear. For example, if you add a tag filter and a platform filter, all results will have the tag and the platform. You can set the results to match any filter instead. Click on `all filters` and select `any filter` from the drop-down menu. + +

+ +

+ +#### Negating An Advanced Filter + +After creating a filter, you can choose whether results should or should not match it. Change this by clicking the operation in the top right of the filter and selecting the negated operation. + +

+ +

+ + +### Results + +Search results appear ranked by their relevance. In self-hosted DataHub ranking is based on how closely the query matched textual fields of an asset and its metadata. In Managed DataHub, ranking is based on a combination of textual relevance, usage (queries / views), and change frequency. + +With better metadata comes better results. Learn more about ingestion technical metadata in the [metadata ingestion](../../metadata-ingestion/README.md) guide. + +### Advanced queries + +The search bar supports advanced queries with pattern matching, logical expressions and filtering by specific field matches. + +The following examples are in the format of +X: *typical question* : ```what to key in search bar```. [sample url](https://example.com) Wildcard characters can be added to the search terms as well. These examples are non exhaustive and using Datasets as a reference. -I want to: -1. *Find a dataset with the word **mask** in the name* : +If you want to: +1. Find a dataset with the word **mask** in the name: ```name: *mask*``` [Sample results](https://demo.datahubproject.io/search?page=1&query=name%3A%20%2Amask%2A) This will return entities with **mask** in the name. Names tends to be connected by other symbols, hence the wildcard symbols before and after the word. -2. *Find a dataset with a property, **encoding*** +2. Find a dataset with a property, **encoding** ```customProperties: encoding*``` [Sample results](https://demo.datahubproject.io/search?page=1&query=customProperties%3A%20encoding%2A) Dataset Properties are indexed in ElasticSearch the manner of key=value. Hence if you know the precise key-value pair, you can search using ```key=value```. However, if you only know the key, you can use wildcards to replace the value and that is what is being done here. -3. *Find a dataset with a column name, **latitude*** +3. 
Find a dataset with a column name, **latitude** ```fieldPaths: latitude``` [Sample results](https://demo.datahubproject.io/search?page=1&query=fieldPaths%3A%20latitude) fieldPaths is the name of the attribute that holds the column name in Datasets. -4. *Find a dataset with the term **latitude** in the field description* +4. Find a dataset with the term **latitude** in the field description ```editedFieldDescriptions: latitude OR fieldDescriptions: latitude``` [Sample results](https://demo.datahubproject.io/search?page=1&query=editedFieldDescriptions%3A%20latitude%20OR%20fieldDescriptions%3A%20latitude) Datasets has 2 attributes that contains field description. fieldDescription comes from the SchemaMetadata aspect, while editedFieldDescriptions comes from the EditableSchemaMetadata aspect. EditableSchemaMetadata holds information that comes from UI edits, while SchemaMetadata holds data from ingestion of the dataset. -5. *Find a dataset with the term **logical** in the dataset description* +5. Find a dataset with the term **logical** in the dataset description ```editedDescription: *logical* OR description: *logical*``` [Sample results](https://demo.datahubproject.io/search?page=1&query=editedDescription%3A%20%2Alogical%2A%20OR%20description%3A%20%2Alogical%2A) Similar to field descriptions, dataset descriptions can be found in 2 aspects, hence the need to search 2 attributes. -6. *Find a dataset which reside in one of the browsing folders, for instance, the **hive** folder* +6. Find a dataset which reside in one of the browsing folders, for instance, the **hive** folder ```browsePaths: *hive*``` [Sample results](https://demo.datahubproject.io/search?page=1&query=browsePaths%3A%20%2Ahive%2A) BrowsePath is stored as a complete string, for instance ```/datasets/prod/hive/SampleKafkaDataset```, hence the need for wildcards on both ends of the term to return a result. -## Where to find more information? + + +### Videos + +**What can you do with DataHub?** + +

+ +

+ + +### GraphQL + +* [searchAcrossEntities](https://datahubproject.io/docs/graphql/queries/#searchacrossentities) +* You can try out the API on the demo instance's public GraphQL interface: [here](https://demo.datahubproject.io/api/graphiql) + +The same GraphQL API that powers the Search UI can be used +for integrations and programmatic use-cases. + +``` +# Example query +{ + searchAcrossEntities( + input: {types: [], query: "*", start: 0, count: 10, filters: [{field: "fieldTags", value: "urn:li:tag:Dimension"}]} + ) { + start + count + total + searchResults { + entity { + type + ... on Dataset { + urn + type + platform { + name + } + name + } + } + } + } +} +``` + + +### DataHub Blog +* [Using DataHub for Search & Discovery](https://blog.datahubproject.io/using-datahub-for-search-discovery-fa309089be22) + +## FAQ and Troubleshooting + +**How are the results ordered?** + +The order of the search results is based on the weight that DataHub gives them based on our search algorithm. The current algorithm in OSS DataHub is based on a text-match score from Elastic Search. + +**Where to find more information?** + The sample queries here are non exhaustive. [The link here](https://demo.datahubproject.io/tag/urn:li:tag:Searchable) shows the current list of indexed fields for each entity inside Datahub. Click on the fields inside each entity and see which field has the tag ```Searchable```. However, it does not tell you the specific attribute name to use for specialized searches. One way to do so is to inspect the ElasticSearch indices, for example: -```curl http://localhost:9200/_cat/indices``` returns all the ES indices in the ElasticSearch container. +`curl http://localhost:9200/_cat/indices` returns all the ES indices in the ElasticSearch container. 
+ ``` yellow open chartindex_v2_1643510690325 bQO_RSiCSUiKJYsmJClsew 1 1 2 0 8.5kb 8.5kb yellow open mlmodelgroupindex_v2_1643510678529 OjIy0wb7RyKqLz3uTENRHQ 1 1 0 0 208b 208b @@ -74,11 +212,13 @@ yellow open system_metadata_service_v1 36spEDbDTdKgVl yellow open schemafieldindex_v2_1643510684410 tZ1gC3haTReRLmpCxirVxQ 1 1 0 0 208b 208b yellow open mlfeatureindex_v2_1643510680246 aQO5HF0mT62Znn-oIWBC8A 1 1 20 0 17.4kb 17.4kb yellow open tagindex_v2_1643510684785 PfnUdCUORY2fnF3I3W7HwA 1 1 3 1 18.6kb 18.6kb -``` +``` + The index name will vary from instance to instance. Indexed information about Datasets can be found in: -```curl http://localhost:9200/datasetindex_v2_1643510688970/_search?=pretty``` +`curl http://localhost:9200/datasetindex_v2_1643510688970/_search?=pretty` + +example information of a dataset: -example information of a dataset: ``` { "_index" : "datasetindex_v2_1643510688970", @@ -121,5 +261,16 @@ example information of a dataset: }, ``` + + +*Need more help? Join the conversation in [Slack](http://slack.datahubproject.io)!* +### Related Features +* [Metadata ingestion framework](../../metadata-ingestion/README.md) diff --git a/docs/lineage/airflow.md b/docs/lineage/airflow.md index 770d3910c41246..002f5fa919b57b 100644 --- a/docs/lineage/airflow.md +++ b/docs/lineage/airflow.md @@ -44,6 +44,7 @@ We recommend you use the lineage plugin if you are on Airflow version >= 2.0.2 o |Name | Default value | Description | |---|---|---| + | datahub.enabled | true | If the plugin should be enabled. | | datahub.conn_id | datahub_rest_default | The name of the datahub connection you set in step 1. | | datahub.cluster | prod | name of the airflow cluster | | datahub.capture_ownership_info | true | If true, the owners field of the DAG will be capture as a DataHub corpuser. 
| @@ -99,6 +100,7 @@ If you are looking to run Airflow and DataHub using docker locally, follow the g [lineage] backend = datahub_provider.lineage.datahub.DatahubLineageBackend datahub_kwargs = { + "enabled": true, "datahub_conn_id": "datahub_rest_default", "cluster": "prod", "capture_ownership_info": true, diff --git a/docs/managed-datahub/approval-workflows.md b/docs/managed-datahub/approval-workflows.md index e7035ec1b2f6bf..3853a7c37817fd 100644 --- a/docs/managed-datahub/approval-workflows.md +++ b/docs/managed-datahub/approval-workflows.md @@ -181,7 +181,10 @@ To be able to approve or deny proposals you need one of the following Metadata p - Manage Dataset Column Tag Proposals - Manage Dataset Column Term Proposals -These map directly to the 4 privileges for doing the proposals +These map directly to the 4 privileges for doing the proposals. + +To be able to approve or deny proposals to the glossary itself, you just need one permission: +- Manage Glossaries ### Videos diff --git a/docs/quickstart.md b/docs/quickstart.md index f8fc64e8e65f25..83efe2fc1f262d 100644 --- a/docs/quickstart.md +++ b/docs/quickstart.md @@ -96,6 +96,7 @@ By default the quickstart deploy will require the following ports to be free on In case the default ports conflict with software you are already running on your machine, you can override these ports by passing additional flags to the `datahub docker quickstart` command. e.g. To override the MySQL port with 53306 (instead of the default 3306), you can say: `datahub docker quickstart --mysql-port 53306`. Use `datahub docker quickstart --help` to see all the supported options. + For the metadata service container (datahub-gms), you need to use an environment variable, `DATAHUB_MAPPED_GMS_PORT`. 
So for instance to use the port 58080, you would say `DATAHUB_MAPPED_GMS_PORT=58080 datahub docker quickstart` diff --git a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java index ab57d19bf3ca32..2dc2ef9d2bd0c8 100644 --- a/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java +++ b/entity-registry/src/main/java/com/linkedin/metadata/models/registry/LineageRegistry.java @@ -132,5 +132,27 @@ public static class EdgeInfo { String type; RelationshipDirection direction; String opposingEntityType; + + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } + + if (o instanceof EdgeInfo) { + return ((EdgeInfo) o).type.equalsIgnoreCase(this.type) + && ((EdgeInfo) o).direction.equals(this.direction) + && ((EdgeInfo) o).opposingEntityType.equalsIgnoreCase(this.opposingEntityType); + } + return false; + } + + @Override + public int hashCode() { + return ((this.type == null ? 0 : this.type.toLowerCase().hashCode()) + ^ (this.direction == null ? 0 : this.direction.hashCode()) + ^ (this.opposingEntityType == null ? 0 : this.opposingEntityType.toLowerCase().hashCode())); + } } + } diff --git a/gradle/versioning/versioning.gradle b/gradle/versioning/versioning.gradle new file mode 100644 index 00000000000000..50fd3e844bd409 --- /dev/null +++ b/gradle/versioning/versioning.gradle @@ -0,0 +1,86 @@ +/** + Applies a consistent versioning scheme to all projects using this script + +Uses git tags to mint versions by default. +git tags can be of a few forms: +- short sha (typical for a PR or a commit) (e.g. 38960ae) +- versioned tags (typical for a release) (e.g. 
v0.8.45, v0.8.45.1, v0.8.45rc1, v0.8.45.1rc4) + +Produces the following variables and supports token replacement +- version: server version amenable for creating jars +- fullVersion: full version string +- cliMajorVersion: cli version amenable for binding to server as a default + 0.8.44 or 0.8.44-1 (for clean tags) or 0.8.45-SNAPSHOT (for unclean repositories) + + All inference can be overridden by passing in the releaseVersion property + e.g. -PreleaseVersion=0.2.3.4 will set the jar version to 0.2.3-4 + + **/ + + +import org.apache.tools.ant.filters.ReplaceTokens + +def detailedVersionString = "0.0.0-unknown-SNAPSHOT" +def cliMajorVersion = "0.8.42" // base default cli major version +def snapshotVersion = false +if (project.hasProperty("releaseVersion")) { + version = releaseVersion + detailedVersionString = releaseVersion +} else { + try { + // apply this plugin in a try-catch block so that we can handle cases without .git directory + apply plugin: "com.palantir.git-version" + def details = versionDetails() + detailedVersionString = gitVersion() + version = details.lastTag + version = version.startsWith("v")? version.substring(1): version + def suffix = details.isCleanTag? "": "-SNAPSHOT" + snapshotVersion = ! 
details.isCleanTag + } + catch (Exception e) { + e.printStackTrace() + // last fall back + version = detailedVersionString + } +} + +// trim version if it is of size 4 to size 3 +def versionParts = version.tokenize(".") +if (versionParts.size() > 3) { + // at-least 4 part version + // we check if the 4th part is a .0 in which case we want to create a release + if ((versionParts.size() == 4) && (versionParts[3] == '0')) { + versionParts = versionParts[0..2] + } + version = versionParts[0..2].join('.') + if (versionParts.size() > 3) { + version = version + "-" + versionParts[3..versionParts.size()-1].join('-') + } + cliMajorVersion = version +} else if (versionParts.size() == 3) { + cliMajorVersion = version +} + +if (snapshotVersion) { + if (versionParts[versionParts.size()-1].isInteger()) { + def base_version = versionParts[0..versionParts.size()-2].join('.') + version = base_version + '.' + (versionParts[versionParts.size()-1].toInteger()+1).toString() + "-SNAPSHOT" + cliMajorVersion = base_version + "." 
+ versionParts[versionParts.size()-1] + } else { + // we are unable to parse the last token as an integer, so we just append SNAPSHOT to this version + version = versionParts[0..versionParts.size()-1].join('.') + '-SNAPSHOT' + cliMajorVersion = versionParts[0..versionParts.size()-1].join('.') + } +} + + +processResources { + filter(ReplaceTokens, tokens:[fullVersion: detailedVersionString]) + filter(ReplaceTokens, tokens:[cliMajorVersion: cliMajorVersion]) +} + +task printVersionDetails() { + println("fullVersion=" + detailedVersionString) + println("cliMajorVersion=" + cliMajorVersion) + println("version=" + version) +} diff --git a/li-utils/src/main/java/com/linkedin/metadata/Constants.java b/li-utils/src/main/java/com/linkedin/metadata/Constants.java index adee116295d44b..1f1408632b7779 100644 --- a/li-utils/src/main/java/com/linkedin/metadata/Constants.java +++ b/li-utils/src/main/java/com/linkedin/metadata/Constants.java @@ -47,6 +47,7 @@ public class Constants { public static final String DATA_HUB_UPGRADE_ENTITY_NAME = "dataHubUpgrade"; public static final String INVITE_TOKEN_ENTITY_NAME = "inviteToken"; public static final String DATAHUB_ROLE_ENTITY_NAME = "dataHubRole"; + public static final String POST_ENTITY_NAME = "post"; /** @@ -243,7 +244,6 @@ public class Constants { public static final String IS_MEMBER_OF_GROUP_RELATIONSHIP_NAME = "IsMemberOfGroup"; public static final String IS_MEMBER_OF_NATIVE_GROUP_RELATIONSHIP_NAME = "IsMemberOfNativeGroup"; - // acryl-main only public static final String CHANGE_EVENT_PLATFORM_EVENT_NAME = "entityChangeEvent"; /** @@ -258,6 +258,14 @@ public class Constants { public static final String DATA_PROCESS_INSTANCE_PROPERTIES_ASPECT_NAME = "dataProcessInstanceProperties"; public static final String DATA_PROCESS_INSTANCE_RUN_EVENT_ASPECT_NAME = "dataProcessInstanceRunEvent"; + // Posts + public static final String POST_INFO_ASPECT_NAME = "postInfo"; + public static final String LAST_MODIFIED_FIELD_NAME = "lastModified"; + 
+ // Telemetry + public static final String CLIENT_ID_URN = "urn:li:telemetry:clientId"; + public static final String CLIENT_ID_ASPECT = "telemetryClientId"; + private Constants() { } } diff --git a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java index d781d2b325a0cb..2af6d78b41612c 100644 --- a/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java +++ b/metadata-dao-impl/kafka-producer/src/main/java/com/linkedin/metadata/dao/producer/KafkaEventProducer.java @@ -10,6 +10,7 @@ import com.linkedin.mxe.MetadataAuditEvent; import com.linkedin.mxe.MetadataAuditOperation; import com.linkedin.mxe.MetadataChangeLog; +import com.linkedin.mxe.MetadataChangeProposal; import com.linkedin.mxe.PlatformEvent; import com.linkedin.mxe.SystemMetadata; import com.linkedin.mxe.TopicConvention; @@ -155,6 +156,43 @@ record = EventUtils.pegasusToAvroMCL(metadataChangeLog); } } + @Override + @WithSpan + public void produceMetadataChangeProposal(@Nonnull final MetadataChangeProposal metadataChangeProposal) { + GenericRecord record; + + Urn urn = metadataChangeProposal.getEntityUrn(); + if (urn == null) { + throw new IllegalArgumentException("Urn for proposal cannot be null."); + } + try { + log.debug(String.format("Converting Pegasus snapshot to Avro snapshot urn %s\nMetadataChangeProposal: %s", + urn, + metadataChangeProposal)); + record = EventUtils.pegasusToAvroMCP(metadataChangeProposal); + } catch (IOException e) { + log.error(String.format("Failed to convert Pegasus MCP to Avro: %s", metadataChangeProposal), e); + throw new ModelConversionException("Failed to convert Pegasus MCP to Avro", e); + } + + String topic = _topicConvention.getMetadataChangeProposalTopicName(); + if (_callback.isPresent()) { + _producer.send(new ProducerRecord(topic, urn.toString(), record), 
_callback.get()); + } else { + _producer.send(new ProducerRecord(topic, urn.toString(), record), (metadata, e) -> { + if (e != null) { + log.error(String.format("Failed to emit MCP for entity with urn %s", urn), e); + } else { + log.debug(String.format("Successfully emitted MCP for entity with urn %s at offset %s, partition %s, topic %s", + urn, + metadata.offset(), + metadata.partition(), + metadata.topic())); + } + }); + } + } + @Override public void producePlatformEvent(@Nonnull String name, @Nullable String key, @Nonnull PlatformEvent event) { GenericRecord record; diff --git a/metadata-events/mxe-schemas/bin/mainGeneratedDataTemplate/com/linkedin/data/template/StringArrayArray.class b/metadata-events/mxe-schemas/bin/mainGeneratedDataTemplate/com/linkedin/data/template/StringArrayArray.class new file mode 100644 index 00000000000000..dfb99ce63bd6d9 Binary files /dev/null and b/metadata-events/mxe-schemas/bin/mainGeneratedDataTemplate/com/linkedin/data/template/StringArrayArray.class differ diff --git a/metadata-events/mxe-utils-avro-1.7/src/main/java/com/linkedin/metadata/EventUtils.java b/metadata-events/mxe-utils-avro-1.7/src/main/java/com/linkedin/metadata/EventUtils.java index 582c519392a47c..dca9ef3865454a 100644 --- a/metadata-events/mxe-utils-avro-1.7/src/main/java/com/linkedin/metadata/EventUtils.java +++ b/metadata-events/mxe-utils-avro-1.7/src/main/java/com/linkedin/metadata/EventUtils.java @@ -171,7 +171,7 @@ public static PlatformEvent avroToPegasusPE(@Nonnull GenericRecord record) throw public static GenericRecord pegasusToAvroMAE(@Nonnull MetadataAuditEvent event) throws IOException { GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MAE_AVRO_SCHEMA); - return renameSchemaNamespace(original, ORIGINAL_MAE_AVRO_SCHEMA, RENAMED_MAE_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_MAE_AVRO_SCHEMA); } /** @@ -185,7 +185,21 @@ public static GenericRecord pegasusToAvroMAE(@Nonnull 
MetadataAuditEvent event) public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) throws IOException { GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCL_AVRO_SCHEMA); - return renameSchemaNamespace(original, ORIGINAL_MCL_AVRO_SCHEMA, RENAMED_MCL_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_MCL_AVRO_SCHEMA); + } + + /** + * Converts a Pegasus MCP into the equivalent Avro model as a {@link GenericRecord}. + * + * @param event the Pegasus {@link MetadataChangeProposal} model + * @return the Avro model with com.linkedin.pegasus2avro.mxe namespace + * @throws IOException if the conversion fails + */ + @Nonnull + public static GenericRecord pegasusToAvroMCP(@Nonnull MetadataChangeProposal event) throws IOException { + GenericRecord original = + DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCP_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_MCP_AVRO_SCHEMA); } /** @@ -199,7 +213,7 @@ public static GenericRecord pegasusToAvroMCL(@Nonnull MetadataChangeLog event) t public static GenericRecord pegasusToAvroMCE(@Nonnull MetadataChangeEvent event) throws IOException { GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_MCE_AVRO_SCHEMA); - return renameSchemaNamespace(original, ORIGINAL_MCE_AVRO_SCHEMA, RENAMED_MCE_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_MCE_AVRO_SCHEMA); } /** @@ -232,7 +246,7 @@ public static GenericRecord pegasusToAvroFailedMCE(@Nonnull FailedMetadataChange GenericRecord original = DataTranslator.dataMapToGenericRecord(failedMetadataChangeEvent.data(), failedMetadataChangeEvent.schema(), ORIGINAL_FAILED_MCE_AVRO_SCHEMA); - return renameSchemaNamespace(original, ORIGINAL_FAILED_MCE_AVRO_SCHEMA, RENAMED_FAILED_MCE_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_FAILED_MCE_AVRO_SCHEMA); } /** @@ -248,7 +262,7 @@ public static
GenericRecord pegasusToAvroFailedMCP( GenericRecord original = DataTranslator.dataMapToGenericRecord(failedMetadataChangeProposal.data(), failedMetadataChangeProposal.schema(), ORIGINAL_FMCL_AVRO_SCHEMA); - return renameSchemaNamespace(original, ORIGINAL_FMCL_AVRO_SCHEMA, RENAMED_FMCP_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_FMCP_AVRO_SCHEMA); } /** @@ -262,13 +276,16 @@ public static GenericRecord pegasusToAvroFailedMCP( public static GenericRecord pegasusToAvroPE(@Nonnull PlatformEvent event) throws IOException { GenericRecord original = DataTranslator.dataMapToGenericRecord(event.data(), event.schema(), ORIGINAL_PE_AVRO_SCHEMA); - return renameSchemaNamespace(original, ORIGINAL_PE_AVRO_SCHEMA, RENAMED_PE_AVRO_SCHEMA); + return renameSchemaNamespace(original, RENAMED_PE_AVRO_SCHEMA); } /** * Converts original MXE into a renamed namespace + * Does a double convert that should not be necessary since we're already converting prior to calling this method + * in most spots */ @Nonnull + @Deprecated private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema originalSchema, @Nonnull Schema newSchema) throws IOException { @@ -279,6 +296,16 @@ private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord origin return changeSchema(record, newSchema, newSchema); } + /** + * Converts original MXE into a renamed namespace + */ + @Nonnull + private static GenericRecord renameSchemaNamespace(@Nonnull GenericRecord original, @Nonnull Schema newSchema) + throws IOException { + + return changeSchema(original, newSchema, newSchema); + } + /** * Changes the schema of a {@link GenericRecord} to a compatible schema * diff --git a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py index f7b384c1d32390..3c0be550dae9bc 100644 --- 
a/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py +++ b/metadata-ingestion-modules/airflow-plugin/src/datahub_airflow_plugin/datahub_plugin.py @@ -18,6 +18,7 @@ def get_lineage_config() -> DatahubLineageConfig: """Load the lineage config from airflow.cfg.""" + enabled = conf.get("datahub", "enabled", fallback=True) datahub_conn_id = conf.get("datahub", "conn_id", fallback="datahub_rest_default") cluster = conf.get("datahub", "cluster", fallback="prod") graceful_exceptions = conf.get("datahub", "graceful_exceptions", fallback=True) @@ -27,6 +28,7 @@ def get_lineage_config() -> DatahubLineageConfig: ) capture_executions = conf.get("datahub", "capture_executions", fallback=True) return DatahubLineageConfig( + enabled=enabled, datahub_conn_id=datahub_conn_id, cluster=cluster, graceful_exceptions=graceful_exceptions, diff --git a/metadata-ingestion/docs/sources/bigquery/bigquery-beta_pre.md b/metadata-ingestion/docs/sources/bigquery/bigquery-beta_pre.md index c2fa46ba0a261f..6162a99a7a5220 100644 --- a/metadata-ingestion/docs/sources/bigquery/bigquery-beta_pre.md +++ b/metadata-ingestion/docs/sources/bigquery/bigquery-beta_pre.md @@ -1,56 +1,53 @@ ### Prerequisites +To understand how BigQuery ingestion needs to be set up, first familiarize yourself with the concepts in the diagram below: +

+ +

-#### Create a datahub profile in GCP +There are two important concepts to understand and identify: +- *Extractor Project*: This is the project associated with a service-account, whose credentials you will be configuring in the connector. The connector uses this service-account to run jobs (including queries) within the project. +- *Bigquery Projects* are the projects from which table metadata, lineage, usage, and profiling data need to be collected. By default, the extractor project is included in the list of projects that DataHub collects metadata from, but you can control that by passing in a specific list of project ids that you want to collect metadata from. Read the configuration section below to understand how to limit the list of projects that DataHub extracts metadata from. +#### Create a datahub profile in GCP 1. Create a custom role for datahub as per [BigQuery docs](https://cloud.google.com/iam/docs/creating-custom-roles#creating_a_custom_role). -2. Grant the following permissions to this role: +2. Follow the sections below to grant permissions to this role on this project and other projects. + +##### Basic Requirements (needed for metadata ingestion) +1. Identify your Extractor Project where the service account will run queries to extract metadata. + +| permission                       | Description                                                                                                                         | Capability                                                               | +|----------------------------------|-------------------------------------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------| +| `bigquery.jobs.create`           | Run jobs (e.g. queries) within the project. 
*This only needs for the extractor project where the service account belongs*           |                                                                                                               | +| `bigquery.jobs.list`             | Manage the queries that the service account has sent. *This only needs for the extractor project where the service account belongs* |                                                                                                               | +| `bigquery.readsessions.create`   | Create a session for streaming large results. *This only needs for the extractor project where the service account belongs*         |                                                                                                               | +| `bigquery.readsessions.getData` | Get data from the read session. *This only needs for the extractor project where the service account belongs*                       | +| `bigquery.tables.create`         | Create temporary tables when profiling tables. Tip: Use the `profiling.bigquery_temp_table_schema` to ensure that all temp tables (across multiple projects) are created in this project under a specific dataset.                 | Profiling                           |                                                                                                                 | +| `bigquery.tables.delete`         | Delete temporary tables when profiling tables. Tip: Use the `profiling.bigquery_temp_table_schema` to ensure that all temp tables (across multiple projects) are created in this project under a specific dataset.                   | Profiling                           |                                                                                                                 | +2. 
Grant the following permissions to the Service Account on every project where you would like to extract metadata from :::info If you have multiple projects in your BigQuery setup, the role should be granted these permissions in each of the projects. ::: - -##### Basic Requirements (needs for metadata ingestion) - -| permission | Description | -| -------------------------------- | ----------------------------------------------------- | -| `bigquery.datasets.get` | Retrieve metadata about a dataset. | -| `bigquery.datasets.getIamPolicy` | Read a dataset's IAM permissions. | -| `bigquery.jobs.create` | Run jobs (e.g. queries) within the project. | -| `bigquery.jobs.list` | Manage the queries that the service account has sent. | -| `bigquery.tables.list` | List BigQuery tables. | -| `bigquery.tables.get` | Retrieve metadata for a table. | -| `bigquery.readsessions.create` | Create a session for streaming large results. | -| `bigquery.readsessions.getData` | Get data from the read session. | -| `resourcemanager.projects.get` | Retrieve project names and metadata. | - -##### Lineage/usage generation requirements - -Additional requirements needed on the top of the basic requirements. -If you want to get lineage from multiple projects you have to grant this permission -for each of them. - -| permission | Description | -| -------------------------------- | ------------------------------------------------------------------------------------------------------------ | -| `bigquery.jobs.listAll` | List all jobs (queries) submitted by any user. | -| `logging.logEntries.list` | Fetch log entries for lineage/usage data. Not required if `use_exported_bigquery_audit_metadata` is enabled. | -| `logging.privateLogEntries.list` | Fetch log entries for lineage/usage data. Not required if `use_exported_bigquery_audit_metadata` is enabled. | - -##### Profiling requirements - -Additional requirements needed on the top of the basic requirements. 
- -| permission | Description | -| ------------------------- | ----------------------------------------------------------------------------------------- | -| `bigquery.tables.getData` | Access table data to do the profiling. | -| `bigquery.tables.create` | Create temporary tables when profiling partitioned/sharded tables. See below for details. | -| `bigquery.tables.delete` | Delete temporary tables when profiling partitioned/sharded tables. See below for details. | - -Profiler creates temporary tables to profile partitioned/sharded tables and that is why it needs table create/delete privilege. +| permission                       | Description                                                                                                 | Capability               | Default GCP role which contains this permission                                                                 | +|----------------------------------|--------------------------------------------------------------------------------------------------------------|-------------------------------------|-----------------------------------------------------------------------------------------------------------------| +| `bigquery.datasets.get`         | Retrieve metadata about a dataset.                                                                           | Table Metadata Extraction           | [roles/bigquery.metadataViewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.metadataViewer) | +| `bigquery.datasets.getIamPolicy` | Read a dataset's IAM permissions.                                                                           | Table Metadata Extraction           | [roles/bigquery.metadataViewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.metadataViewer) | +| `bigquery.tables.list`           | List BigQuery tables.                                                                                       
| Table Metadata Extraction           | [roles/bigquery.metadataViewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.metadataViewer) | +| `bigquery.tables.get`           | Retrieve metadata for a table.                                                                               | Table Metadata Extraction           | [roles/bigquery.metadataViewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.metadataViewer) | +| `resourcemanager.projects.get`   | Retrieve project names and metadata.                                                                         | Table Metadata Extraction           | [roles/bigquery.metadataViewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.metadataViewer) | +| `bigquery.jobs.listAll`         | List all jobs (queries) submitted by any user. Needs for Lineage extraction.                                 | Lineage Extraction/Usage extraction | [roles/bigquery.resourceViewer](https://cloud.google.com/bigquery/docs/access-control#bigquery.resourceViewer) | +| `logging.logEntries.list`       | Fetch log entries for lineage/usage data. Not required if `use_exported_bigquery_audit_metadata` is enabled. | Lineage Extraction/Usage extraction | [roles/logging.privateLogViewer](https://cloud.google.com/logging/docs/access-control#logging.privateLogViewer) | +| `logging.privateLogEntries.list` | Fetch log entries for lineage/usage data. Not required if `use_exported_bigquery_audit_metadata` is enabled. | Lineage Extraction/Usage extraction | [roles/logging.privateLogViewer](https://cloud.google.com/logging/docs/access-control#logging.privateLogViewer) | +| `bigquery.tables.getData`       | Access table data to extract storage size, last updated at, data profiles etc. 
| Profiling                           |                                                                                                                 | +| `bigquery.tables.create`         | [Optional] Only needed if not using the `profiling.bigquery_temp_table_schema` config option. | Profiling                           |                                                                                                                 | +| `bigquery.tables.delete`         | [Optional] Only needed if not using the `profiling.bigquery_temp_table_schema` config option. | Profiling                           |                                                                                                                 | + +The profiler creates temporary tables to profile partitioned/sharded tables and that is why it needs table create/delete privilege. Use `profiling.bigquery_temp_table_schema` to restrict to one specific dataset the create/delete permission -#### Create a service account +#### Create a service account in the Extractor Project 1. Setup a ServiceAccount as per [BigQuery docs](https://cloud.google.com/iam/docs/creating-managing-service-accounts#iam-service-accounts-create-console) and assign the previously created role to this service account. 
@@ -59,16 +56,16 @@ Use `profiling.bigquery_temp_table_schema` to restrict to one specific dataset t ```json { - "type": "service_account", - "project_id": "project-id-1234567", - "private_key_id": "d0121d0000882411234e11166c6aaa23ed5d74e0", - "private_key": "-----BEGIN PRIVATE KEY-----\nMIIyourkey\n-----END PRIVATE KEY-----", - "client_email": "test@suppproject-id-1234567.iam.gserviceaccount.com", - "client_id": "113545814931671546333", - "auth_uri": "https://accounts.google.com/o/oauth2/auth", - "token_uri": "https://oauth2.googleapis.com/token", - "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", - "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/test%suppproject-id-1234567.iam.gserviceaccount.com" + "type": "service_account", + "project_id": "project-id-1234567", + "private_key_id": "d0121d0000882411234e11166c6aaa23ed5d74e0", + "private_key": "-----BEGIN PRIVATE KEY-----\nMIIyourkey\n-----END PRIVATE KEY-----", + "client_email": "test@suppproject-id-1234567.iam.gserviceaccount.com", + "client_id": "113545814931671546333", + "auth_uri": "https://accounts.google.com/o/oauth2/auth", + "token_uri": "https://oauth2.googleapis.com/token", + "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs", + "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/test%suppproject-id-1234567.iam.gserviceaccount.com" } ``` @@ -86,11 +83,11 @@ Use `profiling.bigquery_temp_table_schema` to restrict to one specific dataset t ```yml credential: - project_id: project-id-1234567 - private_key_id: "d0121d0000882411234e11166c6aaa23ed5d74e0" - private_key: "-----BEGIN PRIVATE KEY-----\nMIIyourkey\n-----END PRIVATE KEY-----\n" - client_email: "test@suppproject-id-1234567.iam.gserviceaccount.com" - client_id: "123456678890" + project_id: project-id-1234567 + private_key_id: "d0121d0000882411234e11166c6aaa23ed5d74e0" + private_key: "-----BEGIN PRIVATE KEY-----\nMIIyourkey\n-----END PRIVATE 
KEY-----\n" + client_email: "test@suppproject-id-1234567.iam.gserviceaccount.com" + client_id: "123456678890" ``` ### Lineage Computation Details @@ -112,8 +109,8 @@ Temporary tables are removed after profiling. ```yaml profiling: - enabled: true - bigquery_temp_table_schema: my-project-id.my-schema-where-views-can-be-created + enabled: true + bigquery_temp_table_schema: my-project-id.my-schema-where-views-can-be-created ``` :::note diff --git a/metadata-ingestion/docs/sources/tableau/tableau.md b/metadata-ingestion/docs/sources/tableau/tableau_pre.md similarity index 95% rename from metadata-ingestion/docs/sources/tableau/tableau.md rename to metadata-ingestion/docs/sources/tableau/tableau_pre.md index dda55ececef879..838d9117420127 100644 --- a/metadata-ingestion/docs/sources/tableau/tableau.md +++ b/metadata-ingestion/docs/sources/tableau/tableau_pre.md @@ -6,6 +6,33 @@ In order to ingest metadata from tableau, you will need: - [Enable the Tableau Metadata API](https://help.tableau.com/current/api/metadata_api/en-us/docs/meta_api_start.html#enable-the-tableau-metadata-api-for-tableau-server) for Tableau Server, if its not already enabled. - Tableau Credentials (Username/Password or [Personal Access Token](https://help.tableau.com/current/pro/desktop/en-us/useracct.htm#create-and-revoke-personal-access-tokens)) +### Ingestion through UI + +The following video shows you how to get started with ingesting Tableau metadata through the UI. + +
+