Skip to content

Commit

Permalink
feat: add CROState to vertices; partial linearisation (#263)
Browse files Browse the repository at this point in the history
Co-authored-by: trungnotchung <[email protected]>
Co-authored-by: droak <[email protected]>
  • Loading branch information
3 people authored Dec 3, 2024
1 parent 76d22c9 commit 90d554e
Show file tree
Hide file tree
Showing 5 changed files with 500 additions and 155 deletions.
173 changes: 158 additions & 15 deletions packages/object/src/hashgraph/index.ts
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
import * as crypto from "node:crypto";
import { log } from "../index.js";
import { linearizeMultiple } from "../linearize/multipleSemantics.js";
import { linearizePair } from "../linearize/pairSemantics.js";
import {
import { linearizeMultipleSemantics } from "../linearize/multipleSemantics.js";
import { linearizePairSemantics } from "../linearize/pairSemantics.js";
import type {
Vertex_Operation as Operation,
Vertex,
} from "../proto/drp/object/v1/object_pb.js";
import { BitSet } from "./bitset.js";

// Reexporting the Vertex and Operation types from the protobuf file
export { Vertex, Operation };
export type { Vertex, Operation };

export type Hash = string;

Expand Down Expand Up @@ -42,6 +42,11 @@ export type ResolveConflictsType = {
vertices?: Hash[];
};

/**
 * Distance metadata kept per vertex.
 * `distance` is the length of the shortest dependency path back to the root
 * (the root itself is seeded with 0), computed as min(dep.distance + 1) over
 * the vertex's dependencies.
 * `closestDependency` is the dependency hash realising that minimum
 * (unset/empty for the root).
 */
export type VertexDistance = {
distance: number;
closestDependency?: Hash;
};

export class HashGraph {
nodeId: string;
resolveConflicts: (vertices: Vertex[]) => ResolveConflictsType;
Expand All @@ -62,6 +67,7 @@ export class HashGraph {
private arePredecessorsFresh = false;
private reachablePredecessors: Map<Hash, BitSet> = new Map();
private topoSortedIndex: Map<Hash, number> = new Map();
private vertexDistances: Map<Hash, VertexDistance> = new Map();
// We start with a bitset of size 1, and double it every time we reach the limit
private currentBitsetSize = 1;

Expand All @@ -86,6 +92,9 @@ export class HashGraph {
this.vertices.set(HashGraph.rootHash, rootVertex);
this.frontier.push(HashGraph.rootHash);
this.forwardEdges.set(HashGraph.rootHash, []);
this.vertexDistances.set(HashGraph.rootHash, {
distance: 0,
});
}

addToFrontier(operation: Operation): Vertex {
Expand All @@ -110,9 +119,24 @@ export class HashGraph {
this.forwardEdges.get(dep)?.push(hash);
}

// Compute the distance of the vertex
const vertexDistance: VertexDistance = {
distance: Number.MAX_VALUE,
closestDependency: "",
};
for (const dep of deps) {
const depDistance = this.vertexDistances.get(dep);
if (depDistance && depDistance.distance + 1 < vertexDistance.distance) {
vertexDistance.distance = depDistance.distance + 1;
vertexDistance.closestDependency = dep;
}
}
this.vertexDistances.set(hash, vertexDistance);

const depsSet = new Set(deps);
this.frontier = this.frontier.filter((hash) => !depsSet.has(hash));
this.arePredecessorsFresh = false;

return vertex;
}

Expand Down Expand Up @@ -149,22 +173,41 @@ export class HashGraph {
this.forwardEdges.get(dep)?.push(hash);
}

// Compute the distance of the vertex
const vertexDistance: VertexDistance = {
distance: Number.MAX_VALUE,
closestDependency: "",
};
for (const dep of deps) {
const depDistance = this.vertexDistances.get(dep);
if (depDistance && depDistance.distance + 1 < vertexDistance.distance) {
vertexDistance.distance = depDistance.distance + 1;
vertexDistance.closestDependency = dep;
}
}
this.vertexDistances.set(hash, vertexDistance);

const depsSet = new Set(deps);
this.frontier = this.frontier.filter((hash) => !depsSet.has(hash));
this.arePredecessorsFresh = false;
return hash;
}

depthFirstSearch(visited: Map<Hash, number> = new Map()): Hash[] {
depthFirstSearch(
origin: Hash,
subgraph: Set<Hash>,
visited: Map<Hash, number> = new Map(),
): Hash[] {
const result: Hash[] = [];
for (const vertex of this.getAllVertices()) {
visited.set(vertex.hash, DepthFirstSearchState.UNVISITED);
for (const hash of subgraph) {
visited.set(hash, DepthFirstSearchState.UNVISITED);
}
const visit = (hash: Hash) => {
visited.set(hash, DepthFirstSearchState.VISITING);

const children = this.forwardEdges.get(hash) || [];
for (const child of children) {
if (!subgraph.has(child)) continue;
if (visited.get(child) === DepthFirstSearchState.VISITING) {
log.error("::hashgraph::DFS: Cycle detected");
return;
Expand All @@ -182,16 +225,20 @@ export class HashGraph {
visited.set(hash, DepthFirstSearchState.VISITED);
};

visit(HashGraph.rootHash);
visit(origin);

return result;
}

topologicalSort(updateBitsets = false): Hash[] {
const result = this.depthFirstSearch();
/* Topologically sort the vertices in the whole hashgraph or the past of a given vertex. */
topologicalSort(
updateBitsets = false,
origin: Hash = HashGraph.rootHash,
subgraph: Set<Hash> = new Set(this.vertices.keys()),
): Hash[] {
const result = this.depthFirstSearch(origin, subgraph);
result.reverse();
if (!updateBitsets) return result;

this.reachablePredecessors.clear();
this.topoSortedIndex.clear();

Expand Down Expand Up @@ -221,17 +268,109 @@ export class HashGraph {
return result;
}

/**
 * Linearize the operations of the subgraph rooted at `origin`, using this
 * hashgraph's configured conflict-resolution semantics.
 *
 * Note: the stale pre-refactor lines (`return linearizePair(this)` /
 * `return linearizeMultiple(this)`) that were left interleaved from the old
 * side of the diff are removed here — they were unreachable duplicates of the
 * new `*Semantics` calls.
 *
 * @param origin - Vertex hash to start the linearization from
 *                 (defaults to the root hash, i.e. the whole graph).
 * @param subgraph - Set of vertex hashes the linearization is restricted to
 *                   (defaults to every vertex in the graph).
 * @returns The linearized operations; an empty array for an unknown
 *          semantics type.
 */
linearizeOperations(
origin: Hash = HashGraph.rootHash,
subgraph: Set<string> = new Set(this.vertices.keys()),
): Operation[] {
switch (this.semanticsType) {
case SemanticsType.pair:
return linearizePairSemantics(this, origin, subgraph);
case SemanticsType.multiple:
return linearizeMultipleSemantics(this, origin, subgraph);
default:
return [];
}
}

/**
 * Compute the lowest common ancestor (LCA) of a set of vertices by folding
 * the pairwise LCA across the list.
 *
 * NOTE(review): `lowestCommonAncestorPairVertices` pushes additional hashes
 * onto `targetVertices` while this loop runs, so the loop bound
 * `targetVertices.length` is deliberately re-evaluated each iteration — do
 * not cache it.
 *
 * @param hashes - Hashes of the vertices to find the common ancestor of.
 * @param visited - Shared accumulator of already-visited hashes; mutated by
 *                  the pairwise helper. Entries already in it are skipped.
 * @returns The hash of the lowest common ancestor.
 * @throws If `hashes` is empty, or the pairwise helper fails to produce an
 *         ancestor (it returns `undefined` on lookup errors).
 */
lowestCommonAncestorMultipleVertices(
hashes: Hash[],
visited: Set<Hash>,
): Hash {
if (hashes.length === 0) {
throw new Error("Vertex dependencies are empty");
}
// A single vertex is trivially its own LCA.
if (hashes.length === 1) {
return hashes[0];
}
// Fold: lca = LCA(lca, next target), starting from the first hash.
let lca: Hash | undefined = hashes[0];
const targetVertices: Hash[] = [...hashes];
for (let i = 1; i < targetVertices.length; i++) {
if (!lca) {
throw new Error("LCA not found");
}
// Skip targets the pairwise walk has already covered.
if (!visited.has(targetVertices[i])) {
lca = this.lowestCommonAncestorPairVertices(
lca,
targetVertices[i],
visited,
targetVertices,
);
}
}
if (!lca) {
throw new Error("LCA not found");
}
return lca;
}

/**
 * Compute the lowest common ancestor of two vertices by walking both toward
 * the root along their `closestDependency` links, always advancing whichever
 * cursor is deeper (per `vertexDistances`).
 *
 * Side effects:
 * - Every hash stepped onto is added to `visited`.
 * - Dependencies that branch off the closest-dependency path (and are not
 *   yet visited) are appended to `targetVertices`, so the caller's fold can
 *   process those side branches too.
 * - If a cursor steps onto an already-visited hash, the OTHER cursor is
 *   returned immediately as the meeting point.
 *
 * @param hash1 - First vertex hash.
 * @param hash2 - Second vertex hash.
 * @param visited - Shared set of hashes seen so far; mutated here.
 * @param targetVertices - Caller's work list; side-branch deps are pushed
 *                         onto it (mutated here).
 * @returns The common ancestor hash, or `undefined` when a distance entry or
 *          closest dependency is missing (logged as an error).
 */
private lowestCommonAncestorPairVertices(
hash1: Hash,
hash2: Hash,
visited: Set<Hash>,
targetVertices: Hash[],
): Hash | undefined {
let currentHash1 = hash1;
let currentHash2 = hash2;
visited.add(currentHash1);
visited.add(currentHash2);

// Walk until both cursors land on the same vertex.
while (currentHash1 !== currentHash2) {
const distance1 = this.vertexDistances.get(currentHash1);
if (!distance1) {
log.error("::hashgraph::LCA: Vertex not found");
return;
}
const distance2 = this.vertexDistances.get(currentHash2);
if (!distance2) {
log.error("::hashgraph::LCA: Vertex not found");
return;
}

// Advance the deeper cursor one step toward the root.
if (distance1.distance > distance2.distance) {
if (!distance1.closestDependency) {
log.error("::hashgraph::LCA: Closest dependency not found");
return;
}
// Queue the deps we are NOT following so the caller can cover them.
for (const dep of this.vertices.get(currentHash1)?.dependencies || []) {
if (dep !== distance1.closestDependency && !visited.has(dep)) {
targetVertices.push(dep);
}
}
currentHash1 = distance1.closestDependency;
// Stepped onto ground already covered: the other cursor is the meet.
if (visited.has(currentHash1)) {
return currentHash2;
}
visited.add(currentHash1);
} else {
if (!distance2.closestDependency) {
log.error("::hashgraph::LCA: Closest dependency not found");
return;
}
// Queue the deps we are NOT following so the caller can cover them.
for (const dep of this.vertices.get(currentHash2)?.dependencies || []) {
if (dep !== distance2.closestDependency && !visited.has(dep)) {
targetVertices.push(dep);
}
}
currentHash2 = distance2.closestDependency;
// Stepped onto ground already covered: the other cursor is the meet.
if (visited.has(currentHash2)) {
return currentHash1;
}
visited.add(currentHash2);
}
}
return currentHash1;
}

areCausallyRelatedUsingBitsets(hash1: Hash, hash2: Hash): boolean {
if (!this.arePredecessorsFresh) {
this.topologicalSort(true);
Expand Down Expand Up @@ -303,7 +442,11 @@ export class HashGraph {
}

const visited = new Map<Hash, number>();
this.depthFirstSearch(visited);
this.depthFirstSearch(
HashGraph.rootHash,
new Set(this.vertices.keys()),
visited,
);
for (const vertex of this.getAllVertices()) {
if (!visited.has(vertex.hash)) {
return false;
Expand Down
Loading

1 comment on commit 90d554e

@github-actions
Copy link

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Performance Alert ⚠️

Possible performance regression was detected for benchmark 'Benchmark.js Benchmark'.
Benchmark result of this commit is worse than the previous benchmark result exceeding threshold 1.30.

Benchmark suite | Current: 90d554e | Previous: 76d22c9 | Ratio
Create HashGraph with 1000 vertices | 10.54 ops/sec (±1.79%) | 246 ops/sec (±0.36%) | 23.34
Create 2 DRP Objects (1000 vertices each) and Merge | 0.5 ops/sec (±1.45%) | 0.98 ops/sec (±3.13%) | 1.96

This comment was automatically generated by workflow using github-action-benchmark.

Please sign in to comment.