diff --git a/src/main/cpp/src/HashJni.cpp b/src/main/cpp/src/HashJni.cpp
index 398128483c..520b6f24c0 100644
--- a/src/main/cpp/src/HashJni.cpp
+++ b/src/main/cpp/src/HashJni.cpp
@@ -21,9 +21,9 @@
 
 extern "C" {
 
-JNIEXPORT jint JNICALL Java_com_nvidia_spark_rapids_jni_Hash_getMaxNestedDepth(JNIEnv* env, jclass)
+JNIEXPORT jint JNICALL Java_com_nvidia_spark_rapids_jni_Hash_getMaxStackDepth(JNIEnv* env, jclass)
 {
-  return spark_rapids_jni::MAX_NESTED_DEPTH;
+  return spark_rapids_jni::MAX_STACK_DEPTH;
 }
 
 JNIEXPORT jlong JNICALL Java_com_nvidia_spark_rapids_jni_Hash_murmurHash32(
diff --git a/src/main/cpp/src/hash.hpp b/src/main/cpp/src/hash.hpp
index 18157710ed..9ec7496031 100644
--- a/src/main/cpp/src/hash.hpp
+++ b/src/main/cpp/src/hash.hpp
@@ -25,7 +25,7 @@ namespace spark_rapids_jni {
 
 constexpr int64_t DEFAULT_XXHASH64_SEED = 42;
 
-constexpr int MAX_NESTED_DEPTH = 8;
+constexpr int MAX_STACK_DEPTH = 8;
 
 /**
  * @brief Computes the murmur32 hash value of each row in the input set of columns.
diff --git a/src/main/java/com/nvidia/spark/rapids/jni/Hash.java b/src/main/java/com/nvidia/spark/rapids/jni/Hash.java
index b28812fa6c..96b66555a7 100644
--- a/src/main/java/com/nvidia/spark/rapids/jni/Hash.java
+++ b/src/main/java/com/nvidia/spark/rapids/jni/Hash.java
@@ -25,7 +25,7 @@ public class Hash {
   // there doesn't appear to be a useful constant in spark to reference. this could break.
   static final long DEFAULT_XXHASH64_SEED = 42;
 
-  public static final int MAX_NESTED_DEPTH = getMaxNestedDepth();
+  public static final int MAX_STACK_DEPTH = getMaxStackDepth();
 
   static {
     NativeDepsLoader.loadNativeDeps();
@@ -102,7 +102,7 @@ public static ColumnVector hiveHash(ColumnView columns[]) {
     return new ColumnVector(hiveHash(columnViews));
   }
 
-  private static native int getMaxNestedDepth();
+  private static native int getMaxStackDepth();
 
   private static native long murmurHash32(int seed, long[] viewHandles) throws CudfException;