Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[ML-187] Support Spark 3.1.3 and 3.2.0 and support CDH #197

Merged
merged 7 commits into from
Apr 7, 2022
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Next Next commit
Support Spark 3.1.3
Signed-off-by: minmingzhu <[email protected]>
minmingzhu committed Mar 24, 2022
commit ec9424c8313ee37de11990eccb87d064244f976e
Original file line number Diff line number Diff line change
@@ -32,7 +32,7 @@ object NaiveBayesShim extends Logging {
def create(uid: String): NaiveBayesShim = {
logInfo(s"Loading NaiveBayes for Spark $SPARK_VERSION")
val shim = SPARK_VERSION match {
case "3.1.1" | "3.1.2" | "3.2.0" => new NaiveBayesSpark320(uid)
case "3.1.1" | "3.1.2" | "3.1.3" | "3.2.0" => new NaiveBayesSpark320(uid)
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
shim
Original file line number Diff line number Diff line change
@@ -32,7 +32,7 @@ object KMeansShim extends Logging {
def create(uid: String): KMeansShim = {
logInfo(s"Loading KMeans for Spark $SPARK_VERSION")
val kmeans = SPARK_VERSION match {
case "3.1.1" | "3.1.2" | "3.2.0" => new KMeansSpark320(uid)
case "3.1.1" | "3.1.2" | "3.1.3" | "3.2.0" => new KMeansSpark320(uid)
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
kmeans
Original file line number Diff line number Diff line change
@@ -32,7 +32,7 @@ object PCAShim extends Logging {
def create(uid: String): PCAShim = {
logInfo(s"Loading PCA for Spark $SPARK_VERSION")
val pca = SPARK_VERSION match {
case "3.1.1" | "3.1.2" | "3.2.0" => new PCASpark320(uid)
case "3.1.1" | "3.1.2" | "3.1.3" | "3.2.0" => new PCASpark320(uid)
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
pca
Original file line number Diff line number Diff line change
@@ -18,7 +18,7 @@ package com.intel.oap.mllib.recommendation

import org.apache.spark.internal.Logging
import org.apache.spark.ml.recommendation.ALS.Rating
import org.apache.spark.ml.recommendation.spark312.{ALS => ALSSpark312}
import org.apache.spark.ml.recommendation.spark313.{ALS => ALSSpark313}
import org.apache.spark.ml.recommendation.spark320.{ALS => ALSSpark320}
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
@@ -47,7 +47,7 @@ object ALSShim extends Logging {
def create(): ALSShim = {
logInfo(s"Loading ALS for Spark $SPARK_VERSION")
val als = SPARK_VERSION match {
case "3.1.1" | "3.1.2" => new ALSSpark312()
case "3.1.1" | "3.1.2" | "3.1.3" => new ALSSpark313()
case "3.2.0" => new ALSSpark320()
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
Original file line number Diff line number Diff line change
@@ -19,7 +19,7 @@ package com.intel.oap.mllib.regression
import org.apache.spark.internal.Logging
import org.apache.spark.ml.param.ParamMap
import org.apache.spark.ml.regression.LinearRegressionModel
import org.apache.spark.ml.regression.spark312.{LinearRegression => LinearRegressionSpark312}
import org.apache.spark.ml.regression.spark313.{LinearRegression => LinearRegressionSpark313}
import org.apache.spark.ml.regression.spark320.{LinearRegression => LinearRegressionSpark320}
import org.apache.spark.sql.Dataset
import org.apache.spark.{SPARK_VERSION, SparkException}
@@ -33,7 +33,7 @@ object LinearRegressionShim extends Logging {
def create(uid: String): LinearRegressionShim = {
logInfo(s"Loading ALS for Spark $SPARK_VERSION")
val linearRegression = SPARK_VERSION match {
case "3.1.1" | "3.1.2" => new LinearRegressionSpark312(uid)
case "3.1.1" | "3.1.2" | "3.1.3" => new LinearRegressionSpark313(uid)
case "3.2.0" => new LinearRegressionSpark320(uid)
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
Original file line number Diff line number Diff line change
@@ -35,7 +35,7 @@ object CorrelationShim extends Logging {
def create(): CorrelationShim = {
logInfo(s"Loading Correlation for Spark $SPARK_VERSION")
val als = SPARK_VERSION match {
case "3.1.1" | "3.1.2" | "3.2.0" => new CorrelationSpark320()
case "3.1.1" | "3.1.2" | "3.1.3" | "3.2.0" => new CorrelationSpark320()
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
als
Original file line number Diff line number Diff line change
@@ -34,7 +34,7 @@ object SummarizerShim extends Logging {
def create(): SummarizerShim = {
logInfo(s"Loading Summarizer for Spark $SPARK_VERSION")
val summarizer = SPARK_VERSION match {
case "3.1.1" | "3.1.2" | "3.2.0" => new SummarizerSpark320()
case "3.1.1" | "3.1.2" | "3.1.3" | "3.2.0" => new SummarizerSpark320()
case _ => throw new SparkException(s"Unsupported Spark version $SPARK_VERSION")
}
summarizer
Original file line number Diff line number Diff line change
@@ -17,7 +17,7 @@
*/
// scalastyle:on

package org.apache.spark.ml.recommendation.spark312
package org.apache.spark.ml.recommendation.spark313

import com.github.fommil.netlib.BLAS.{getInstance => blas}
import com.intel.oap.mllib.{Utils => DALUtils}
Original file line number Diff line number Diff line change
@@ -17,7 +17,7 @@
*/
// scalastyle:on

package org.apache.spark.ml.regression.spark312
package org.apache.spark.ml.regression.spark313

import breeze.linalg.{DenseVector => BDV}
import breeze.optimize.{