Skip to content

Commit

Permalink
Use List instead of Set for ids, since the ids contain no duplicates
Browse files Browse the repository at this point in the history
Change-Id: I4e775b77964980644b08d8569315b4ab9af0701f
  • Loading branch information
javeme committed Jul 29, 2021
1 parent 56e18d1 commit 97b6001
Show file tree
Hide file tree
Showing 6 changed files with 33 additions and 27 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import javax.inject.Singleton;
Expand All @@ -44,6 +45,7 @@
import com.baidu.hugegraph.api.graph.EdgeAPI;
import com.baidu.hugegraph.api.graph.VertexAPI;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.query.QueryResults;
import com.baidu.hugegraph.core.GraphManager;
import com.baidu.hugegraph.server.RestServer;
Expand All @@ -57,7 +59,7 @@
import com.baidu.hugegraph.util.Log;
import com.codahale.metrics.annotation.Timed;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableList;

@Path("graphs/{graph}/traversers/kneighbor")
@Singleton
Expand Down Expand Up @@ -132,18 +134,20 @@ public String post(@Context GraphManager manager,
request.limit);
}

int size = results.size();
Set<Id> neighbors = request.countOnly ?
ImmutableSet.of() : results.ids(request.limit);
long size = results.size();
if (request.limit != Query.NO_LIMIT && size > request.limit) {
size = request.limit;
}
List<Id> neighbors = request.countOnly ?
ImmutableList.of() : results.ids(request.limit);

HugeTraverser.PathSet paths = new HugeTraverser.PathSet();
if (request.withPath) {
paths.addAll(results.paths(request.limit));
}
Iterator<Vertex> iter = QueryResults.emptyIterator();
if (request.withVertex) {
Set<Id> ids = new HashSet<>();
ids.addAll(results.ids(request.limit));
if (request.withVertex && !request.countOnly) {
Set<Id> ids = new HashSet<>(neighbors);
if (request.withPath) {
for (HugeTraverser.Path p : paths) {
ids.addAll(p.vertices());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import javax.inject.Singleton;
Expand All @@ -45,6 +46,7 @@
import com.baidu.hugegraph.api.graph.EdgeAPI;
import com.baidu.hugegraph.api.graph.VertexAPI;
import com.baidu.hugegraph.backend.id.Id;
import com.baidu.hugegraph.backend.query.Query;
import com.baidu.hugegraph.backend.query.QueryResults;
import com.baidu.hugegraph.core.GraphManager;
import com.baidu.hugegraph.server.RestServer;
Expand All @@ -58,7 +60,7 @@
import com.baidu.hugegraph.util.Log;
import com.codahale.metrics.annotation.Timed;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableList;

@Path("graphs/{graph}/traversers/kout")
@Singleton
Expand Down Expand Up @@ -141,18 +143,20 @@ public String post(@Context GraphManager manager,
request.limit);
}

int size = results.size();
Set<Id> neighbors = request.countOnly ?
ImmutableSet.of() : results.ids(request.limit);
long size = results.size();
if (request.limit != Query.NO_LIMIT && size > request.limit) {
size = request.limit;
}
List<Id> neighbors = request.countOnly ?
ImmutableList.of() : results.ids(request.limit);

HugeTraverser.PathSet paths = new HugeTraverser.PathSet();
if (request.withPath) {
paths.addAll(results.paths(request.limit));
}
Iterator<Vertex> iter = QueryResults.emptyIterator();
if (request.withVertex) {
Set<Id> ids = new HashSet<>();
ids.addAll(results.ids(request.limit));
if (request.withVertex && !request.countOnly) {
Set<Id> ids = new HashSet<>(neighbors);
if (request.withPath) {
for (HugeTraverser.Path p : paths) {
ids.addAll(p.vertices());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversal;
import org.apache.tinkerpop.gremlin.structure.Edge;
Expand Down Expand Up @@ -311,17 +310,17 @@ public String writeWeightedPaths(WeightedPaths paths,
}

@Override
public String writeNodesWithPath(String name, Set<Id> nodes, int size,
public String writeNodesWithPath(String name, List<Id> nodes, long size,
Collection<HugeTraverser.Path> paths,
Iterator<Vertex> iterator) {
Iterator<Vertex> vertices) {
List<Map<String, Object>> pathList = new ArrayList<>();
for (HugeTraverser.Path path : paths) {
pathList.add(path.toMap(false));
}

Map<String, Object> results;
results = ImmutableMap.of(name, nodes, "size", size,
"paths", pathList, "vertices", iterator);
"paths", pathList, "vertices", vertices);
return JsonUtil.toJson(results);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.tinkerpop.gremlin.structure.Edge;
import org.apache.tinkerpop.gremlin.structure.Vertex;
Expand Down Expand Up @@ -101,7 +100,7 @@ public String writeWeightedPath(NodeWithWeight path,
public String writeWeightedPaths(WeightedPaths paths,
Iterator<Vertex> vertices);

public String writeNodesWithPath(String name, Set<Id> nodes, int size,
public String writeNodesWithPath(String name, List<Id> nodes, long size,
Collection<HugeTraverser.Path> paths,
Iterator<Vertex> iterator);
Iterator<Vertex> vertices);
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@

import static com.baidu.hugegraph.backend.query.Query.NO_LIMIT;

import java.util.Set;
import java.util.List;
import java.util.Stack;

import com.baidu.hugegraph.backend.id.Id;
Expand All @@ -44,8 +44,8 @@ public int size() {
return (int) this.accessed();
}

public Set<Id> ids(long limit) {
Set<Id> ids = CollectionFactory.newIdSet(CollectionType.EC);
public List<Id> ids(long limit) {
List<Id> ids = CollectionFactory.newList(CollectionType.EC);
Stack<Record> records = this.records();
// Not include record(i=0) to ignore source vertex
for (int i = 1; i < records.size(); i++) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@

import static com.baidu.hugegraph.backend.query.Query.NO_LIMIT;

import java.util.Set;
import java.util.List;
import java.util.Stack;

import com.baidu.hugegraph.backend.id.Id;
Expand All @@ -44,9 +44,9 @@ public int size() {
return this.currentRecord().size();
}

public Set<Id> ids(long limit) {
public List<Id> ids(long limit) {
List<Id> ids = CollectionFactory.newList(CollectionType.EC);
IntIterator iterator = this.records().peek().keys();
Set<Id> ids = CollectionFactory.newIdSet(CollectionType.EC);
while ((limit == NO_LIMIT || limit-- > 0L) && iterator.hasNext()) {
ids.add(this.id(iterator.next()));
}
Expand Down

0 comments on commit 97b6001

Please sign in to comment.