upgrading pointstore #284

Merged (4 commits) on Jan 12, 2022
Changes from 1 commit
4 changes: 2 additions & 2 deletions Java/core/src/main/java/com/amazon/randomcutforest/RCF3.java
@@ -60,7 +60,7 @@
import com.amazon.randomcutforest.sampler.CompactSampler;
import com.amazon.randomcutforest.sampler.IStreamSampler;
import com.amazon.randomcutforest.store.IPointStore;
- import com.amazon.randomcutforest.store.RCF3PointStore;
+ import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.tree.IBoundingBoxView;
import com.amazon.randomcutforest.tree.ITree;
import com.amazon.randomcutforest.tree.NewRandomCutTree;
@@ -296,7 +296,7 @@ public RCF3(Builder<?> builder) {
}

private void initCompactFloat(Builder<?> builder) {
- IPointStore tempStore = RCF3PointStore.builder().capacity(pointStoreCapacity).initialSize(2 * sampleSize)
+ IPointStore tempStore = PointStore.builder().capacity(pointStoreCapacity).initialSize(2 * sampleSize)
.internalShinglingEnabled(internalShinglingEnabled).shingleSize(shingleSize).dimensions(dimensions)
// .dynamicResizingEnabled(true)
Contributor: Remove?

.internalRotationEnabled(builder.internalRotationEnabled).build();

@@ -57,8 +57,8 @@
import com.amazon.randomcutforest.sampler.IStreamSampler;
import com.amazon.randomcutforest.sampler.SimpleStreamSampler;
import com.amazon.randomcutforest.store.IPointStore;
+ import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.store.PointStoreDouble;
- import com.amazon.randomcutforest.store.PointStoreFloat;
import com.amazon.randomcutforest.tree.CompactRandomCutTreeDouble;
import com.amazon.randomcutforest.tree.CompactRandomCutTreeFloat;
import com.amazon.randomcutforest.tree.ITree;
@@ -323,7 +323,7 @@ private void initCompactDouble(Builder<?> builder) {
}

private void initCompactFloat(Builder<?> builder) {
- PointStoreFloat tempStore = PointStoreFloat.builder().internalRotationEnabled(builder.internalRotationEnabled)
+ PointStore tempStore = PointStore.builder().internalRotationEnabled(builder.internalRotationEnabled)
.capacity(pointStoreCapacity).initialSize(initialPointStoreSize)
.directLocationEnabled(builder.directLocationMapEnabled)
.internalShinglingEnabled(internalShinglingEnabled)

@@ -48,8 +48,8 @@
import com.amazon.randomcutforest.state.tree.CompactRandomCutTreeFloatMapper;
import com.amazon.randomcutforest.state.tree.CompactRandomCutTreeState;
import com.amazon.randomcutforest.store.IPointStore;
+ import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.store.PointStoreDouble;
- import com.amazon.randomcutforest.store.PointStoreFloat;
import com.amazon.randomcutforest.tree.CompactRandomCutTreeDouble;
import com.amazon.randomcutforest.tree.CompactRandomCutTreeFloat;
import com.amazon.randomcutforest.tree.ITree;
@@ -163,7 +163,7 @@ public RandomCutForestState toState(RandomCutForest forest) {
if (forest.getPrecision() == Precision.FLOAT_32) {
PointStoreFloatMapper mapper = new PointStoreFloatMapper();
mapper.setCompressionEnabled(compressionEnabled);
- pointStoreState = mapper.toState((PointStoreFloat) pointStoreCoordinator.getStore());
+ pointStoreState = mapper.toState((PointStore) pointStoreCoordinator.getStore());
} else {
PointStoreDoubleMapper mapper = new PointStoreDoubleMapper();
mapper.setCompressionEnabled(compressionEnabled);

@@ -25,7 +25,6 @@

import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.IStateMapper;
- import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.store.PointStoreDouble;
import com.amazon.randomcutforest.util.ArrayPacking;

@@ -49,7 +48,7 @@ public PointStoreDouble toModel(PointStoreState state, long seed) {
int startOfFreeSegment = state.getStartOfFreeSegment();
int[] refCount = ArrayPacking.unpackInts(state.getRefCount(), indexCapacity, state.isCompressed());
int[] locationList = new int[indexCapacity];
- Arrays.fill(locationList, PointStore.INFEASIBLE_POINTSTORE_LOCATION);
+ Arrays.fill(locationList, PointStoreDouble.INFEASIBLE_LOCN);
int[] tempList = ArrayPacking.unpackInts(state.getLocationList(), state.isCompressed());
System.arraycopy(tempList, 0, locationList, 0, tempList.length);

@@ -71,23 +70,25 @@ public PointStoreState toState(PointStoreDouble model) {
state.setDimensions(model.getDimensions());
state.setCapacity(model.getCapacity());
state.setShingleSize(model.getShingleSize());
- state.setDirectLocationMap(model.isDirectLocationMap());
+ state.setDirectLocationMap(false);
state.setInternalShinglingEnabled(model.isInternalShinglingEnabled());
state.setLastTimeStamp(model.getNextSequenceIndex());
if (model.isInternalShinglingEnabled()) {
state.setInternalShingle(model.getInternalShingle());
state.setRotationEnabled(model.isInternalRotationEnabled());
}
- state.setDynamicResizingEnabled(model.isDynamicResizingEnabled());
- if (model.isDynamicResizingEnabled()) {
+ state.setDynamicResizingEnabled(true);
+ if (state.isDynamicResizingEnabled()) {
state.setCurrentStoreCapacity(model.getCurrentStoreCapacity());
state.setIndexCapacity(model.getIndexCapacity());
}
state.setStartOfFreeSegment(model.getStartOfFreeSegment());
state.setPrecision(Precision.FLOAT_64.name());
- int prefix = model.getValidPrefix();
- state.setRefCount(ArrayPacking.pack(model.getRefCount(), prefix, state.isCompressed()));
- state.setLocationList(ArrayPacking.pack(model.getLocationList(), prefix, state.isCompressed()));
+ // int prefix = model.getValidPrefix();
+ int[] refcount = model.getRefCount();
+ state.setRefCount(ArrayPacking.pack(refcount, refcount.length, state.isCompressed()));
+ int[] locationList = model.getLocationList();
+ state.setLocationList(ArrayPacking.pack(locationList, locationList.length, state.isCompressed()));
state.setPointData(ArrayPacking.pack(model.getStore(), model.getStartOfFreeSegment()));
return state;
}

@@ -18,28 +18,25 @@
import static com.amazon.randomcutforest.CommonUtils.checkArgument;
import static com.amazon.randomcutforest.CommonUtils.checkNotNull;

- import java.util.Arrays;
-
import lombok.Getter;
import lombok.Setter;

import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.IStateMapper;
import com.amazon.randomcutforest.store.PointStore;
- import com.amazon.randomcutforest.store.PointStoreFloat;
import com.amazon.randomcutforest.util.ArrayPacking;

@Getter
@Setter
- public class PointStoreFloatMapper implements IStateMapper<PointStoreFloat, PointStoreState> {
+ public class PointStoreFloatMapper implements IStateMapper<PointStore, PointStoreState> {

/**
* If true, then the arrays are compressed via simple data dependent scheme
*/
private boolean compressionEnabled = true;

@Override
- public PointStoreFloat toModel(PointStoreState state, long seed) {
+ public PointStore toModel(PointStoreState state, long seed) {
checkNotNull(state.getRefCount(), "refCount must not be null");
checkNotNull(state.getPointData(), "pointData must not be null");
checkArgument(Precision.valueOf(state.getPrecision()) == Precision.FLOAT_32,
@@ -50,11 +47,10 @@ public PointStoreFloat toModel(PointStoreState state, long seed) {
int startOfFreeSegment = state.getStartOfFreeSegment();
int[] refCount = ArrayPacking.unpackInts(state.getRefCount(), indexCapacity, state.isCompressed());
int[] locationList = new int[indexCapacity];
- Arrays.fill(locationList, PointStore.INFEASIBLE_POINTSTORE_LOCATION);
int[] tempList = ArrayPacking.unpackInts(state.getLocationList(), state.isCompressed());
System.arraycopy(tempList, 0, locationList, 0, tempList.length);

- return PointStoreFloat.builder().internalRotationEnabled(state.isRotationEnabled())
+ return PointStore.builder().internalRotationEnabled(state.isRotationEnabled())
.internalShinglingEnabled(state.isInternalShinglingEnabled())
.dynamicResizingEnabled(state.isDynamicResizingEnabled())
.directLocationEnabled(state.isDirectLocationMap()).indexCapacity(indexCapacity)
@@ -65,30 +61,31 @@ public PointStoreState toState(PointStoreFloat model) {
}

@Override
- public PointStoreState toState(PointStoreFloat model) {
+ public PointStoreState toState(PointStore model) {
model.compact();
PointStoreState state = new PointStoreState();
state.setCompressed(compressionEnabled);
state.setDimensions(model.getDimensions());
state.setCapacity(model.getCapacity());
state.setShingleSize(model.getShingleSize());
- state.setDirectLocationMap(model.isDirectLocationMap());
+ state.setDirectLocationMap(false);
state.setInternalShinglingEnabled(model.isInternalShinglingEnabled());
state.setLastTimeStamp(model.getNextSequenceIndex());
if (model.isInternalShinglingEnabled()) {
state.setInternalShingle(model.getInternalShingle());
state.setRotationEnabled(model.isInternalRotationEnabled());
}
- state.setDynamicResizingEnabled(model.isDynamicResizingEnabled());
- if (model.isDynamicResizingEnabled()) {
+ state.setDynamicResizingEnabled(true);
+ if (state.isDynamicResizingEnabled()) {
state.setCurrentStoreCapacity(model.getCurrentStoreCapacity());
state.setIndexCapacity(model.getIndexCapacity());
}
state.setStartOfFreeSegment(model.getStartOfFreeSegment());
state.setPrecision(Precision.FLOAT_32.name());
- int prefix = model.getValidPrefix();
- state.setRefCount(ArrayPacking.pack(model.getRefCount(), prefix, state.isCompressed()));
- state.setLocationList(ArrayPacking.pack(model.getLocationList(), prefix, state.isCompressed()));
+ int[] refcount = model.getRefCount();
+ state.setRefCount(ArrayPacking.pack(refcount, refcount.length, state.isCompressed()));
+ int[] locationList = model.getLocationList();
+ state.setLocationList(ArrayPacking.pack(locationList, locationList.length, state.isCompressed()));
state.setPointData(ArrayPacking.pack(model.getStore(), model.getStartOfFreeSegment()));
return state;
}
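
Taken together, the two remapped methods above mean this mapper now round-trips the unified PointStore instead of PointStoreFloat. A minimal sketch of that round trip, assuming a populated PointStore (here named store), assuming the mapper and state classes sit in the state.store package seen in the other imports of this diff, and with a purely illustrative wrapper class:

import com.amazon.randomcutforest.state.store.PointStoreFloatMapper;
import com.amazon.randomcutforest.state.store.PointStoreState;
import com.amazon.randomcutforest.store.PointStore;

public class PointStoreRoundTrip {
    // Snapshot a populated PointStore and rebuild an equivalent store from that snapshot.
    static PointStore roundTrip(PointStore store, long seed) {
        PointStoreFloatMapper mapper = new PointStoreFloatMapper();
        // compress refCount/locationList with the "simple data dependent scheme" noted above
        mapper.setCompressionEnabled(true);
        PointStoreState state = mapper.toState(store); // compacts the store, then captures it
        return mapper.toModel(state, seed);            // rebuilds a PointStore from the state
    }
}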

@@ -22,8 +22,8 @@
/**
* A class for storing the state of a
* {@link com.amazon.randomcutforest.store.PointStoreDouble} or a
- * {@link com.amazon.randomcutforest.store.PointStoreFloat}. Depending on which
- * kind of point store was serialized, one of the fields {@code doubleData} or
+ * {@link com.amazon.randomcutforest.store.PointStore}. Depending on which kind
+ * of point store was serialized, one of the fields {@code doubleData} or
* {@code floatData} will be null.
*/
@Data
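
As the javadoc above says, which data field is populated depends on the precision that was serialized, so a caller restoring a store typically branches on the recorded precision, mirroring the dispatch in RandomCutForestMapper earlier in this diff. A minimal sketch, assuming the Lombok-generated getPrecision() accessor, the state.store package used elsewhere in this diff, and that both concrete stores can be referenced through the raw IPointStore interface (an assumption; the hunks above only show this explicitly for PointStore):

import com.amazon.randomcutforest.config.Precision;
import com.amazon.randomcutforest.state.store.PointStoreDoubleMapper;
import com.amazon.randomcutforest.state.store.PointStoreFloatMapper;
import com.amazon.randomcutforest.state.store.PointStoreState;
import com.amazon.randomcutforest.store.IPointStore;

public class PointStoreStateRestore {
    // Rebuild whichever point store variant was serialized into the given state.
    static IPointStore restore(PointStoreState state, long seed) {
        if (Precision.valueOf(state.getPrecision()) == Precision.FLOAT_32) {
            // floatData was populated; doubleData is null
            return new PointStoreFloatMapper().toModel(state, seed);
        }
        // doubleData was populated; floatData is null
        return new PointStoreDoubleMapper().toModel(state, seed);
    }
}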

@@ -24,7 +24,7 @@
import com.amazon.randomcutforest.state.store.SmallNodeStoreMapper;
import com.amazon.randomcutforest.store.INodeStore;
import com.amazon.randomcutforest.store.NodeStore;
- import com.amazon.randomcutforest.store.PointStoreFloat;
+ import com.amazon.randomcutforest.store.PointStore;
import com.amazon.randomcutforest.store.SmallNodeStore;
import com.amazon.randomcutforest.tree.AbstractCompactRandomCutTree;
import com.amazon.randomcutforest.tree.CompactRandomCutTreeFloat;
@@ -58,7 +58,7 @@ public CompactRandomCutTreeFloat toModel(CompactRandomCutTreeState state, Compac
CompactRandomCutTreeFloat tree = new CompactRandomCutTreeFloat.Builder()
.boundingBoxCacheFraction(state.getBoundingBoxCacheFraction())
.storeSequenceIndexesEnabled(state.isStoreSequenceIndexesEnabled()).maxSize(state.getMaxSize())
- .root(state.getRoot()).randomSeed(state.getSeed()).pointStore((PointStoreFloat) context.getPointStore())
+ .root(state.getRoot()).randomSeed(state.getSeed()).pointStore((PointStore) context.getPointStore())
.nodeStore(nodeStore).centerOfMassEnabled(state.isCenterOfMassEnabled())
.outputAfter(state.getOutputAfter()).build();
return tree;

@@ -19,6 +19,7 @@
import static com.amazon.randomcutforest.CommonUtils.checkState;

import java.util.Arrays;
+ import java.util.Stack;

/**
* This class defines common functionality for Store classes, including
@@ -42,6 +43,21 @@ public IntervalManager(int capacity) {
freeIndexesEnd[0] = capacity - 1;
}

+ public IntervalManager(Stack<int[]> stack, int capacity) {
+ checkArgument(capacity > 0, "incorrect parameters");
+ lastInUse = stack.size();
+ freeIndexesEnd = new int[lastInUse + 1];
+ freeIndexesStart = new int[lastInUse + 1];
+ this.capacity = capacity;
+ int count = 0;
+ while (stack.size() > 0) {
+ int[] interval = stack.pop();
+ freeIndexesStart[count] = interval[0];
+ freeIndexesEnd[count] = interval[1];
+ ++count;
+ }
+ }

public void extendCapacity(int newCapacity) {
checkArgument(newCapacity > capacity, " incorrect call, we can only increase capacity");
// the current capacity need not be the final capacity, for example in case of
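
The new constructor added above rebuilds the free-interval bookkeeping from a stack of [start, end] pairs rather than starting from one all-free interval. A minimal usage sketch, assuming IntervalManager lives in the store package it supports and is accessible to the caller, with illustrative interval values and class name:

import java.util.Stack;

import com.amazon.randomcutforest.store.IntervalManager;

public class IntervalManagerExample {
    public static void main(String[] args) {
        // two free intervals, e.g. recovered from a serialized store: [0, 9] and [40, 99]
        Stack<int[]> freeIntervals = new Stack<>();
        freeIntervals.push(new int[] { 0, 9 });
        freeIntervals.push(new int[] { 40, 99 });

        // rebuild the manager over an index space of 100 slots; the constructor pops the
        // stack, so freeIntervals is empty afterwards
        IntervalManager manager = new IntervalManager(freeIntervals, 100);

        // capacity can only grow, per the checkArgument in extendCapacity
        manager.extendCapacity(200);
    }
}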