Improve logging in GMS and datahub-frontend #2761

Merged · 19 commits · Jun 25, 2021
Changes from 9 commits
12 changes: 7 additions & 5 deletions datahub-frontend/app/controllers/Application.java
@@ -4,7 +4,8 @@
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.typesafe.config.Config;
import org.apache.commons.lang3.StringUtils;
import play.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import play.Play;
import play.libs.Json;
import play.mvc.BodyParser;
@@ -32,6 +33,7 @@

public class Application extends Controller {

private final Logger _logger = LoggerFactory.getLogger(Application.class.getName());
private final Config _config;

@Inject
@@ -74,7 +76,7 @@ public Result printDeps() {
try (BufferedReader reader = new BufferedReader(new FileReader(commitFile))) {
commit = reader.readLine();
} catch (IOException ioe) {
Logger.error("Error while reading commit file. Error message: " + ioe.getMessage());
_logger.error("Error while reading commit file. Error message: " + ioe.getMessage());
}

//get all the files from /libs directory
@@ -84,7 +86,7 @@ public Result printDeps() {
paths.filter(Files::isRegularFile).
forEach(f -> sb.append(f.getFileName()).append("\n"));
} catch (IOException ioe) {
Logger.error("Error while traversing the directory. Error message: " + ioe.getMessage());
_logger.error("Error while traversing the directory. Error message: " + ioe.getMessage());
}

return ok("commit: " + commit + "\n" + "libraries: " + sb.toString());
@@ -242,7 +244,7 @@ public Result authenticate() throws NamingException {
try {
AuthenticationManager.authenticateUser(username, password);
} catch (AuthenticationException e) {
Logger.warn("Authentication error!", e);
_logger.warn("Authentication error!", e);
return badRequest("Invalid Credential");
}

@@ -255,7 +257,7 @@ public Result authenticate() throws NamingException {
String hashedUserName = AuthUtil.generateHash(username, secretKey.getBytes());
session("auth_token", hashedUserName);
} catch (NoSuchAlgorithmException | InvalidKeyException e) {
Logger.error("Failed to hash username", e);
_logger.error("Failed to hash username", e);
}

// Construct an ObjectNode with the username and uuid token to be sent with the response
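A side note on the play.Logger → SLF4J switch above: SLF4J also supports {} placeholders, which defer message construction until the level is actually enabled and preserve the exception's stack trace when it is passed as the last argument. A minimal sketch (the CommitFileReader class and its helper are hypothetical, for illustration only):

import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class CommitFileReader {
    private static final Logger _logger = LoggerFactory.getLogger(CommitFileReader.class);

    // Reads the first line of the commit file, logging a full stack trace on failure.
    String readCommitFile(File commitFile) {
        try (BufferedReader reader = new BufferedReader(new FileReader(commitFile))) {
            return reader.readLine();
        } catch (IOException ioe) {
            // The {} argument is formatted only if ERROR is enabled; the
            // trailing Throwable is logged with its stack trace.
            _logger.error("Error while reading commit file {}", commitFile, ioe);
            return "";
        }
    }
}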
27 changes: 27 additions & 0 deletions datahub-frontend/app/react/analytics/AnalyticsService.java
@@ -32,10 +32,14 @@
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.Cardinality;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class AnalyticsService {

private final Logger _logger = LoggerFactory.getLogger(AnalyticsService.class.getName());

private final RestHighLevelClient _elasticClient;

private static final String INDEX_NAME = "datahub_usage_event";
@@ -60,6 +64,12 @@ public AnalyticsService(final RestHighLevelClient elasticClient) {
public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange, DateInterval granularity,
Optional<String> dimension, // Length 1 for now
Map<String, List<String>> filters, Optional<String> uniqueOn) {

_logger.debug(
String.format("Invoked getTimeseriesChart with indexName: %s, dateRange: %s, granularity: %s, dimension: %s,",
indexName, dateRange, granularity, dimension)
+ String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));

AggregationBuilder filteredAgg = getFilteredAggregation(filters, Optional.of(dateRange));

AggregationBuilder dateHistogram = AggregationBuilders.dateHistogram(DATE_HISTOGRAM)
@@ -88,6 +98,7 @@ public List<NamedLine> getTimeseriesChart(String indexName, DateRange dateRange,
new NamedLine("total", extractPointsFromAggregations(aggregationResult, uniqueOn.isPresent())));
}
} catch (Exception e) {
_logger.error(String.format("Caught exception while getting time series chart: %s", e.getMessage()));
return ImmutableList.of();
}
}
@@ -106,7 +117,13 @@ private List<NumericDataPoint> extractPointsFromAggregations(Aggregations aggreg
public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRange, List<String> dimensions,
// Length 1 or 2
Map<String, List<String>> filters, Optional<String> uniqueOn) {
_logger.debug(
String.format("Invoked getBarChart with indexName: %s, dateRange: %s, dimensions: %s,",
indexName, dateRange, dimensions)
+ String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));

assert (dimensions.size() == 1 || dimensions.size() == 2);

AggregationBuilder filteredAgg = getFilteredAggregation(filters, dateRange);

AggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(dimensions.get(0)).missing(NA);
@@ -138,6 +155,7 @@ public List<NamedBar> getBarChart(String indexName, Optional<DateRange> dateRang
.collect(Collectors.toList());
}
} catch (Exception e) {
_logger.error(String.format("Caught exception while getting bar chart: %s", e.getMessage()));
return ImmutableList.of();
}
}
@@ -152,6 +170,12 @@ private List<BarSegment> extractBarSegmentsFromAggregations(Aggregations aggrega

public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRange, String groupBy,
Map<String, List<String>> filters, Optional<String> uniqueOn, int maxRows) {

_logger.debug(
String.format("Invoked getTopNTableChart with indexName: %s, dateRange: %s, groupBy: %s",
indexName, dateRange, groupBy)
+ String.format("filters: %s, uniqueOn: %s", filters, uniqueOn));

AggregationBuilder filteredAgg = getFilteredAggregation(filters, dateRange);

TermsAggregationBuilder termAgg = AggregationBuilders.terms(DIMENSION).field(groupBy).size(maxRows);
@@ -171,6 +195,7 @@ public List<Row> getTopNTableChart(String indexName, Optional<DateRange> dateRan
ImmutableList.of(bucket.getKeyAsString(), String.valueOf(extractCount(bucket, uniqueOn.isPresent())))))
.collect(Collectors.toList());
} catch (Exception e) {
_logger.error(String.format("Caught exception while getting top n chart: %s", e.getMessage()));
return ImmutableList.of();
}
}
@@ -189,6 +214,7 @@ public int getHighlights(String indexName, Optional<DateRange> dateRange, Map<St
return (int) aggregationResult.getDocCount();
}
} catch (Exception e) {
_logger.error(String.format("Caught exception while getting highlights: %s", e.getMessage()));
return 0;
}
}
@@ -208,6 +234,7 @@ private Filter executeAndExtract(SearchRequest searchRequest) {
// extract results, validated against document model as well
return searchResponse.getAggregations().<Filter>get(FILTERED);
} catch (Exception e) {
_logger.error(String.format("Search query failed: %s", e.getMessage()));
throw new ESQueryException("Search query failed:", e);
}
}
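One caveat with the catch blocks added above: logging only e.getMessage() drops the stack trace, and the message itself can be null. A hedged alternative is to pass the exception as the final argument; a sketch with a hypothetical wrapper class:

import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.concurrent.Callable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ChartQueryGuard {
    private static final Logger _logger = LoggerFactory.getLogger(ChartQueryGuard.class);

    // Runs a chart query, returning an empty result and logging a full stack trace on failure.
    <T> List<T> runSafely(Callable<List<T>> query, String chartName) {
        try {
            return query.call();
        } catch (Exception e) {
            // Throwable-last overload: message plus the full stack trace,
            // even when e.getMessage() is null.
            _logger.error("Caught exception while getting " + chartName, e);
            return ImmutableList.of();
        }
    }
}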
@@ -11,7 +11,8 @@
import org.pac4j.core.exception.HttpAction;
import org.pac4j.play.PlayWebContext;
import org.pac4j.play.http.PlayHttpActionAdapter;
import play.Logger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import play.libs.Json;
import play.mvc.Controller;
import play.mvc.Http;
@@ -32,6 +33,7 @@

public class AuthenticationController extends Controller {

private final Logger _logger = LoggerFactory.getLogger(AuthenticationController.class.getName());
private final Config _configs;
private final OidcConfigs _oidcConfigs;
private final JAASConfigs _jaasConfigs;
@@ -107,7 +109,7 @@ public Result logIn() {
try {
AuthenticationManager.authenticateUser(username, password);
} catch (NamingException e) {
Logger.warn("Authentication error", e);
_logger.error("Authentication error", e);
return badRequest("Invalid Credential");
}

19 changes: 19 additions & 0 deletions datahub-frontend/app/react/controllers/GraphQLController.java
@@ -7,6 +7,8 @@
import com.linkedin.datahub.graphql.GraphQLEngine;
import com.typesafe.config.Config;
import org.apache.commons.io.IOUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import react.analytics.AnalyticsService;
import react.auth.Authenticator;
import graphql.ExecutionResult;
@@ -30,6 +32,8 @@

public class GraphQLController extends Controller {

private final Logger _logger = LoggerFactory.getLogger(GraphQLController.class.getName());

private static final String FRONTEND_SCHEMA_NAME = "datahub-frontend.graphql";

private static final String QUERY_TYPE = "Query";
@@ -82,6 +86,8 @@ public Result execute() throws Exception {
variables = new ObjectMapper().convertValue(variablesJson, new TypeReference<Map<String, Object>>(){ });
}

_logger.debug(String.format("Executing graphQL query: %s, variables: %s", queryJson, variablesJson));

/*
* Init QueryContext
*/
@@ -92,6 +98,19 @@
*/
ExecutionResult executionResult = _engine.execute(queryJson.asText(), variables, context);

if (executionResult.getErrors().size() != 0) {
// There were GraphQL errors. Report in error logs.
_logger.error(String.format("Errors while executing graphQL query: %s, result: %s, errors: %s",
queryJson,
executionResult.toSpecification(),
executionResult.getErrors()));
} else {
_logger.debug(String.format("Executed graphQL query: %s, result: %s"),
queryJson,
executionResult.toSpecification());
}


/*
* Format & Return Response
*/
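For what it is worth, the same branch can be expressed with isEmpty() and {} placeholders; a sketch assuming queryJson and executionResult are in scope as in the diff (the wrapper class is hypothetical):

import com.fasterxml.jackson.databind.JsonNode;
import graphql.ExecutionResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GraphQLResultLogger {
    private static final Logger _logger = LoggerFactory.getLogger(GraphQLResultLogger.class);

    void logResult(JsonNode queryJson, ExecutionResult executionResult) {
        if (!executionResult.getErrors().isEmpty()) {
            // GraphQL errors come back inside the result object rather than
            // as thrown exceptions, so they must be inspected explicitly.
            _logger.error("Errors while executing graphQL query: {}, result: {}, errors: {}",
                queryJson, executionResult.toSpecification(), executionResult.getErrors());
        } else {
            _logger.debug("Executed graphQL query: {}, result: {}",
                queryJson, executionResult.toSpecification());
        }
    }
}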
@@ -9,6 +9,8 @@
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import react.auth.Authenticator;
import javax.annotation.Nonnull;
import javax.inject.Inject;
@@ -25,6 +27,8 @@

public class TrackingController extends Controller {

private final Logger _logger = LoggerFactory.getLogger(TrackingController.class.getName());

private static final List<String> KAFKA_SSL_PROTOCOLS = Collections.unmodifiableList(
Arrays.asList(SecurityProtocol.SSL.name(),SecurityProtocol.SASL_SSL.name()));

@@ -38,6 +42,7 @@ public TrackingController(@Nonnull Config config) {
_config = config;
_isEnabled = !config.hasPath("analytics.enabled") || config.getBoolean("analytics.enabled");
if (_isEnabled) {
_logger.debug("Analytics tracking is enabled");
_producer = createKafkaProducer();
_topic = config.getString("analytics.tracking.topic");
} else {
@@ -60,8 +65,9 @@ public Result track() throws Exception {
} catch (Exception e) {
return badRequest();
}
final String actor = new PlayQueryContext(ctx(), _config).getActor();
try {
final String actor = new PlayQueryContext(ctx(), _config).getActor();
_logger.debug(String.format("Emitting product analytics event. actor: %s, event: %s", actor, event));
final ProducerRecord<String, String> record = new ProducerRecord<>(
_topic,
actor,
@@ -70,6 +76,7 @@
_producer.flush();
return ok();
} catch(Exception e) {
_logger.error(String.format("Failed to emit product analytics event. actor: %s, event: %s", actor, event));
return internalServerError(e.getMessage());
}
}
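A possible extension, not attempted in this diff: KafkaProducer.send() is asynchronous, so the surrounding try/catch only observes client-side failures. A Callback (sketched below with a hypothetical helper) also surfaces broker-side errors, and passing the exception object keeps the stack trace:

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class TrackingSender {
    private static final Logger _logger = LoggerFactory.getLogger(TrackingSender.class);

    void sendEvent(KafkaProducer<String, String> producer, String topic, String actor, String event) {
        ProducerRecord<String, String> record = new ProducerRecord<>(topic, actor, event);
        // The callback fires when the broker acks the record or the send
        // fails asynchronously, which a try/catch around send() cannot see.
        producer.send(record, (metadata, exception) -> {
            if (exception != null) {
                _logger.error("Failed to emit product analytics event. actor: " + actor, exception);
            }
        });
        producer.flush();
    }
}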
55 changes: 45 additions & 10 deletions datahub-frontend/conf/logback.xml
@@ -1,5 +1,5 @@
<configuration>
<property name="LOG_DIR" value="${LOG_DIR:- /tmp/datahub/logs}"/>
<configuration scan="true" scanPeriod="120 seconds">
<property name="LOG_DIR" value="${LOG_DIR:- /tmp/datahub/logs/datahub-frontend}"/>
<timestamp key="bySecond" datePattern="yyyy-MM-dd'_'HH-mm-ss"/>
<timestamp key="byDate" datePattern="yyyy-MM-dd"/>

@@ -10,20 +10,55 @@
</appender>

<appender name="FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}/datahub-frontend-${bySecond}.log</file>
<append>true</append>
<file>${LOG_DIR}/datahub-frontend.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_DIR}/datahub-frontend.%d{yyyy-dd-MM}-%i.log</FileNamePattern>
<!-- each archived file, size max 10MB -->
<maxFileSize>100MB</maxFileSize>
<!-- total size of all archive files, if total size > 10GB, it will delete old archived file -->
<totalSizeCap>10GB</totalSizeCap>
<!-- 30 days to keep -->
<maxHistory>30</maxHistory>
</rollingPolicy>
<encoder>
<pattern>%d{HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
<rollingPolicy class="ch.qos.logback.core.rolling.FixedWindowRollingPolicy">
<maxIndex>10</maxIndex>
<FileNamePattern>${LOG_DIR}/${byDate}.log.%i</FileNamePattern>
</appender>

<appender name="DEBUG_FILE" class="ch.qos.logback.core.rolling.RollingFileAppender">
<file>${LOG_DIR}/datahub-frontend.debug.log</file>
<rollingPolicy class="ch.qos.logback.core.rolling.SizeAndTimeBasedRollingPolicy">
<FileNamePattern>${LOG_DIR}/datahub-frontend.debug.%d{yyyy-dd-MM}-%i.log</FileNamePattern>
<!-- each archived file, size max 100MB -->
<maxFileSize>100MB</maxFileSize>
<!-- total size of all archive files, if total size > 2GB, it will delete old archived file -->
<totalSizeCap>2GB</totalSizeCap>
<!-- 1 days to keep -->
<maxHistory>1</maxHistory>
</rollingPolicy>
<triggeringPolicy class="ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy">
<MaxFileSize>20MB</MaxFileSize>
</triggeringPolicy>
<filter class="ch.qos.logback.classic.filter.LevelFilter">
<level>DEBUG</level>
</filter>
<encoder>
<pattern>%d{HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
</encoder>
</appender>

<!-- Change this to disable logging debug -->
<logger name="com.linkedin" level="debug">
<appender-ref ref="DEBUG_FILE"/>
</logger>
<logger name="controller" level="debug">
<appender-ref ref="DEBUG_FILE"/>
</logger>
<logger name="graphql" level="debug">
<appender-ref ref="DEBUG_FILE"/>
</logger>
<logger name="react" level="debug">
<appender-ref ref="DEBUG_FILE"/>
</logger>

<!-- Change this to disable logging debug -->
<root level="info">
<appender-ref ref="STDOUT"/>
<appender-ref ref="FILE"/>
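Two behaviors of this configuration worth noting: scan="true" makes logback re-read the file every 120 seconds, so the debug loggers can be toggled without restarting the frontend; and logger names follow class names, so any class under the four listed package prefixes is routed to DEBUG_FILE. A sketch of the second point (RoutingDemo is a hypothetical class, assuming this file is picked up as logback.xml):

package react.analytics;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class RoutingDemo {
    private static final Logger _logger = LoggerFactory.getLogger(RoutingDemo.class);

    public static void main(String[] args) {
        // Matches <logger name="react" level="debug">, so this is written to
        // DEBUG_FILE; with default additivity the same event also reaches the
        // root STDOUT and FILE appenders.
        _logger.debug("routed to datahub-frontend.debug.log");
    }
}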