This repository has been archived by the owner on Jun 26, 2024. It is now read-only.

fix(entities): total entities was never fetched for composite sources (#61)

* chore: using data fetcher node instead of selection and filter node

* added unit tests

* added debug logs and comments

* optimize filter and order by single source

* remove redundant method

* remove redundant method

* fix optimization for QS

* remove redundant method

* remove redundant utils method

* revert back entity id equals filter

* remove entity id equals filter

* remove redundant code

* added entity response

* added unit tests

* data fetcher node is paginated for non-null limit and offset

* got rid of the total entities request

* set equals in unit test method

* removed redundant total entities test
skjindal93 authored Jan 15, 2021
1 parent e906caf commit a0e5c44
Showing 22 changed files with 465 additions and 843 deletions.
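The heart of the change, as a hedged sketch (names taken from the diff below; control flow simplified): instead of issuing a second, unpaginated request solely to count entities, the execution tree now returns an EntityResponse that carries both the paginated results and the full set of matching entity keys, so the total falls out of a single pass.

// Sketch only. Before (removed below): a follow-up request with selections,
// order-bys, and pagination cleared, issued just so its size() could serve as
// the total. After: one visitor pass; the total is the size of the full key set.
EntityResponse response =
    executionTree.acceptVisitor(new ExecutionVisitor(executionContext, EntityQueryHandlerRegistry.get()));
int total = response.getEntityKeys().size(); // independent of limit/offset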
@@ -180,19 +180,4 @@ public EntityFetcherResponse getTimeAggregatedMetrics(
EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest) {
throw new UnsupportedOperationException("Fetching time series data not supported by EDS");
  }
-
-  @Override
-  public int getTotalEntities(EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest) {
-    EntityFetcherResponse entityFetcherResponse = getEntities(
-        requestContext,
-        EntitiesRequest.newBuilder(entitiesRequest)
-            .clearSelection()
-            .clearTimeAggregation()
-            .clearOrderBy()
-            .clearLimit()
-            .setOffset(0)
-            .build()
-    );
-    return entityFetcherResponse.size();
-  }
}
@@ -2,6 +2,7 @@

import java.util.LinkedHashMap;
import java.util.Map;

import org.hypertrace.gateway.service.entity.EntityKey;
import org.hypertrace.gateway.service.v1.entity.Entity.Builder;

@@ -0,0 +1,30 @@
+package org.hypertrace.gateway.service.common.datafetcher;
+
+import org.hypertrace.gateway.service.entity.EntityKey;
+
+import java.util.HashSet;
+import java.util.Set;
+
+public class EntityResponse {
+  private final EntityFetcherResponse entityFetcherResponse;
+  // set of entity keys, irrespective of offset and limit
+  private final Set<EntityKey> entityKeys;
+
+  public EntityResponse() {
+    this.entityFetcherResponse = new EntityFetcherResponse();
+    this.entityKeys = new HashSet<>();
+  }
+
+  public EntityResponse(EntityFetcherResponse entityFetcherResponse, Set<EntityKey> entityKeys) {
+    this.entityFetcherResponse = entityFetcherResponse;
+    this.entityKeys = entityKeys;
+  }
+
+  public EntityFetcherResponse getEntityFetcherResponse() {
+    return entityFetcherResponse;
+  }
+
+  public Set<EntityKey> getEntityKeys() {
+    return entityKeys;
+  }
+}
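A minimal usage sketch for the new class (fetchPage and fetchAllKeys are hypothetical helpers, not part of this patch): a fetcher pairs the paginated page of entities with the complete set of matching keys, so callers can report the true total without a second count query.

// Hypothetical call site: the page honors limit/offset, the key set does not.
EntityFetcherResponse page = fetchPage(requestContext, entitiesRequest);
Set<EntityKey> allKeys = fetchAllKeys(requestContext, entitiesRequest);
EntityResponse response = new EntityResponse(page, allKeys);
// The page can never be larger than the full key set.
assert response.getEntityFetcherResponse().size() <= response.getEntityKeys().size();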
@@ -3,7 +3,10 @@
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
+import java.util.Set;
+
import org.hypertrace.gateway.service.entity.EntitiesRequestContext;
+import org.hypertrace.gateway.service.entity.EntityKey;
import org.hypertrace.gateway.service.v1.common.Interval;
import org.hypertrace.gateway.service.v1.common.MetricSeries;
import org.hypertrace.gateway.service.v1.entity.EntitiesRequest;
@@ -44,8 +47,6 @@ EntityFetcherResponse getAggregatedMetrics(
EntityFetcherResponse getTimeAggregatedMetrics(
EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest);

-  int getTotalEntities(EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest);
-
default MetricSeries getSortedMetricSeries(MetricSeries.Builder builder) {
List<Interval> sortedIntervals = new ArrayList<>(builder.getValueList());
sortedIntervals.sort(Comparator.comparingLong(Interval::getStartTimeMillis));
@@ -553,31 +553,6 @@ public EntityFetcherResponse getTimeAggregatedMetrics(
return new EntityFetcherResponse(resultMap);
  }
-
-  @Override
-  public int getTotalEntities(EntitiesRequestContext requestContext, EntitiesRequest entitiesRequest) {
-    Map<String, AttributeMetadata> attributeMetadataMap =
-        attributeMetadataProvider.getAttributesMetadata(
-            requestContext, entitiesRequest.getEntityType());
-    // Validate EntitiesRequest
-    entitiesRequestValidator.validate(entitiesRequest, attributeMetadataMap);
-    return getTotalEntitiesForMultipleEntityId(requestContext, entitiesRequest);
-  }
-
-  private int getTotalEntitiesForMultipleEntityId(EntitiesRequestContext requestContext,
-      EntitiesRequest entitiesRequest) {
-    EntityFetcherResponse entityFetcherResponse = getEntities(
-        requestContext,
-        EntitiesRequest.newBuilder(entitiesRequest)
-            .clearSelection()
-            .clearTimeAggregation()
-            .clearOrderBy()
-            .setOffset(0)
-            .setLimit(QueryServiceClient.DEFAULT_QUERY_SERVICE_GROUP_BY_LIMIT)
-            .build()
-    );
-    return entityFetcherResponse.size();
-  }

private QueryRequest buildTimeSeriesQueryRequest(
EntitiesRequest entitiesRequest,
EntitiesRequestContext context,
@@ -5,6 +5,7 @@
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang3.StringUtils;
import org.hypertrace.core.attribute.service.v1.AttributeMetadata;
@@ -19,6 +20,7 @@
import org.hypertrace.gateway.service.common.datafetcher.EntityDataServiceEntityFetcher;
import org.hypertrace.gateway.service.common.datafetcher.EntityFetcherResponse;
import org.hypertrace.gateway.service.common.datafetcher.EntityInteractionsFetcher;
+import org.hypertrace.gateway.service.common.datafetcher.EntityResponse;
import org.hypertrace.gateway.service.common.datafetcher.QueryServiceEntityFetcher;
import org.hypertrace.gateway.service.common.transformer.RequestPreProcessor;
import org.hypertrace.gateway.service.common.transformer.ResponsePostProcessor;
@@ -138,24 +140,26 @@ public EntitiesResponse getEntities(
* EntityQueryHandlerRegistry.get() returns Singleton object, so, it's guaranteed that
* it won't create new object for each request.
*/
-    EntityFetcherResponse response =
+    EntityResponse response =
        executionTree.acceptVisitor(new ExecutionVisitor(executionContext, EntityQueryHandlerRegistry.get()));

+    EntityFetcherResponse entityFetcherResponse = response.getEntityFetcherResponse();
+    Set<EntityKey> allEntityKeys = response.getEntityKeys();
+
    List<Entity.Builder> results =
        this.responsePostProcessor.transform(
-            executionContext, new ArrayList<>(response.getEntityKeyBuilderMap().values()));
+            executionContext, new ArrayList<>(entityFetcherResponse.getEntityKeyBuilderMap().values()));

    // Add interactions.
    if (!results.isEmpty()) {
      addEntityInteractions(
-          tenantId, preProcessedRequest, response.getEntityKeyBuilderMap(), requestHeaders);
+          tenantId, preProcessedRequest, entityFetcherResponse.getEntityKeyBuilderMap(), requestHeaders);
    }

    EntitiesResponse.Builder responseBuilder =
-        EntitiesResponse.newBuilder().setTotal(executionContext.getTotal());
-    results.forEach(e -> {
-      responseBuilder.addEntity(e.build());
-    });
+        EntitiesResponse.newBuilder().setTotal(allEntityKeys.size());
+
+    results.forEach(e -> responseBuilder.addEntity(e.build()));

long queryExecutionTime = System.currentTimeMillis() - startTime;
if (queryExecutionTime > logConfig.getQueryThresholdInMillis()) {
@@ -21,9 +21,12 @@ public class DataFetcherNode implements QueryNode {
private Integer offset;
private List<OrderByExpression> orderByExpressionList = Collections.emptyList();

+  private final boolean isPaginated;
+
public DataFetcherNode(String source, Filter filter) {
this.source = source;
this.filter = filter;
+    this.isPaginated = false;
}

public DataFetcherNode(
@@ -37,6 +40,7 @@ public DataFetcherNode(
this.limit = limit;
this.offset = offset;
this.orderByExpressionList = orderByExpressionList;
+    this.isPaginated = limit != null && offset != null;
}

public String getSource() {
@@ -59,6 +63,10 @@ public List<OrderByExpression> getOrderByExpressionList() {
return orderByExpressionList;
}

+  public boolean isPaginated() {
+    return isPaginated;
+  }
+
@Override
public <R> R acceptVisitor(Visitor<R> v) {
return v.visit(this);
@@ -72,6 +80,7 @@ public String toString() {
", limit=" + limit +
", offset=" + offset +
", orderByExpressionList=" + orderByExpressionList +
+        ", isPaginated=" + isPaginated +
'}';
}
}
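A short sketch of the flag's semantics (constructor argument order assumed from the field order above): a node is paginated only when both limit and offset are non-null, meaning the source itself applies them; otherwise every matching entity is fetched and any pagination happens downstream.

// Assumed argument order: source, filter, limit, offset, orderByExpressionList.
DataFetcherNode paged = new DataFetcherNode("QS", filter, 10, 0, orderBys);
paged.isPaginated();  // true: the source applies limit/offset itself
DataFetcherNode full = new DataFetcherNode("EDS", filter);
full.isPaginated();   // false: fetches all matches; the total is the result size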
@@ -76,10 +76,6 @@ public class ExecutionContext {
// map of filter, selections (attribute, metrics, aggregations), order by attributes to source map
private final Map<String, Set<String>> allAttributesToSourcesMap = new HashMap<>();

-  /** Following fields set during the query execution phase * */
-  // Total number of entities. Set during the execution before pagination
-  private int total;
-
private ExecutionContext(
AttributeMetadataProvider attributeMetadataProvider,
EntityIdColumnsConfigs entityIdColumnsConfigs,
@@ -158,14 +154,6 @@ public EntitiesRequestContext getEntitiesRequestContext() {
return this.entitiesRequestContext;
}

-  public int getTotal() {
-    return total;
-  }
-
-  public void setTotal(int total) {
-    this.total = total;
-  }
-
public Set<String> getPendingSelectionSources() {
return pendingSelectionSources;
}
@@ -444,7 +432,6 @@ public String toString() {
", pendingMetricAggregationSourcesForOrderBy=" + pendingMetricAggregationSourcesForOrderBy +
", sortAndPaginationNodeAdded=" + sortAndPaginationNodeAdded +
", allAttributesToSourcesMap=" + allAttributesToSourcesMap +
-        ", total=" + total +
'}';
}
}
@@ -109,7 +109,6 @@ public QueryNode build() {
QueryNode rootNode = buildExecutionTreeForSameSourceFilterAndSelection(source);

    rootNode.acceptVisitor(new ExecutionContextBuilderVisitor(executionContext));
-
QueryNode executionTree = buildExecutionTree(executionContext, rootNode);

if (LOG.isDebugEnabled()) {
@@ -163,14 +162,6 @@ private QueryNode buildExecutionTreeForQsFilterAndSelection() {
QueryNode rootNode = createQsDataFetcherNodeWithPagination(entitiesRequest);
    executionContext.setSortAndPaginationNodeAdded(true);
-
-    // If the request has an EntityId EQ filter then there's no need for the 2nd request to get the
-    // total entities. So no need to set the TotalFetcherNode
-    if (ExecutionTreeUtils.hasEntityIdEqualsFilter(executionContext)) {
-      executionContext.setTotal(1);
-    } else {
-      rootNode = new TotalFetcherNode(rootNode, QS.name());
-    }

return rootNode;
}

@@ -182,13 +173,6 @@ private QueryNode buildExecutionTreeForEdsFilterAndSelection() {

QueryNode rootNode = new DataFetcherNode(EDS.name(), filter, selectionLimit, selectionOffset, orderBys);
    executionContext.setSortAndPaginationNodeAdded(true);
-
-    if (ExecutionTreeUtils.hasEntityIdEqualsFilter(executionContext)) {
-      executionContext.setTotal(1);
-    } else {
-      rootNode = new TotalFetcherNode(rootNode, EDS.name());
-    }
-
return rootNode;
}

@@ -285,6 +269,7 @@ QueryNode buildFilterTree(EntitiesRequest entitiesRequest, Filter filter) {
.getSourcesList();

// if the filter by and order by are from QS, pagination can be pushed down to QS

// There will always be a DataFetcherNode for QS, because the results are always fetched
// within a time range. Hence, we can only push pagination down to QS and not any other
// sources, since we will always have a time range filter on QS
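To make the pushdown rule above concrete, a hedged sketch (filterAndOrderByAreQsOnly is an invented stand-in for the real source check): pagination is handed to QS only when both the filter and the order-by are served by QS; otherwise the tree fetches unpaginated and sorts and paginates in memory.

// Sketch, not the actual builder code.
QueryNode rootNode;
if (filterAndOrderByAreQsOnly) {
  rootNode = createQsDataFetcherNodeWithPagination(entitiesRequest); // limit/offset pushed to QS
  executionContext.setSortAndPaginationNodeAdded(true);
} else {
  rootNode = new DataFetcherNode(QS.name(), filter); // unpaginated fetch; paginate later
}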