Commit 18613ce: code optimization
aiceflower committed Nov 4, 2024

1 parent 8aa527c commit 18613ce
Showing 3 changed files with 33 additions and 7 deletions.
SparkErrorCodeSummary.java (org.apache.linkis.engineplugin.spark.errorcode)
@@ -66,6 +66,7 @@ public enum SparkErrorCodeSummary implements LinkisErrorCode {
43032, "The application start failed, since yarn applicationId is null."),

NOT_SUPPORT_METHOD(43040, "Not support method for requestExpectedResource."),
+ NOT_SUPPORT_FUNCTION(43050, "Not support special udf in non-SQL script."),
;

/** Error code (errorCode) */
RuleCheckFailedException.scala (new file, org.apache.linkis.engineplugin.spark.exception)
@@ -0,0 +1,24 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.linkis.engineplugin.spark.exception

import org.apache.linkis.common.exception.ErrorException

/**
 * Thrown when a Spark extension rule check fails, e.g. when a restricted UDF appears in a
 * non-SQL script.
 */
class RuleCheckFailedException(errCode: Int, desc: String) extends ErrorException(errCode, desc) {}
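
Why a dedicated exception type helps (a sketch for illustration, not part of the commit): SparkUDFCheckRule, shown next, throws on a violation inside a try block whose catch clause must also absorb incidental analysis failures. Signalled as a plain RuntimeException, a violation matched the same generic catch clause and could be logged and swallowed; a distinct type lets the rule rethrow violations and log everything else. A minimal sketch of the pattern, where runCheck is a hypothetical helper that throws RuleCheckFailedException on a violation:

def checkedApply(plan: LogicalPlan): LogicalPlan = {
  try {
    runCheck(plan) // hypothetical: throws RuleCheckFailedException on a rule violation
  } catch {
    case e: RuleCheckFailedException => throw e // violation: propagate so execution aborts
    case e: Exception => logger.info("udf check error: {}", e.getMessage) // incidental: log and continue
  }
  plan // no violation found: hand the plan back unchanged
}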
SparkUDFCheckRule.scala (org.apache.linkis.engineplugin.spark.extension)
@@ -18,11 +18,11 @@
package org.apache.linkis.engineplugin.spark.extension

import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
import org.apache.linkis.server.BDPJettyServerHelper
import org.apache.linkis.engineplugin.spark.errorcode.SparkErrorCodeSummary
import org.apache.linkis.engineplugin.spark.exception.RuleCheckFailedException

import org.apache.commons.lang3.StringUtils
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.catalyst.rules.Rule

@@ -50,7 +50,6 @@ case class SparkUDFCheckRule(sparkSession: SparkSession) extends Rule[LogicalPla
}

override def apply(plan: LogicalPlan): LogicalPlan = {
- logger.info(plan.toString())
// Get the code type from the system properties
val codeType: String = System.getProperty(ComputationExecutorConf.CODE_TYPE, "sql")
logger.info("SparkUDFCheckRule codeType: {}", codeType)
@@ -69,9 +68,9 @@ case class SparkUDFCheckRule(sparkSession: SparkSession) extends Rule[LogicalPla
node.collect {
case e: LogicalPlan if containsSpecificFunction(e, udfName) =>
logger.info("contains specific functionName: {}", e.toString())
// If an expression containing the specific UDF function is found, abort by throwing an exception
- throw new RuntimeException(
-   s"Found expression containing specific functionName in non-SQL code type, terminating optimization."
+ throw new RuleCheckFailedException(
+   SparkErrorCodeSummary.NOT_SUPPORT_FUNCTION.getErrorCode,
+   SparkErrorCodeSummary.NOT_SUPPORT_FUNCTION.getErrorDesc
  )
case _ =>
logger.info(
@@ -81,7 +80,9 @@ case class SparkUDFCheckRule(sparkSession: SparkSession) extends Rule[LogicalPla
}
}
} catch {
- case e: RuntimeException =>
+ case e: RuleCheckFailedException =>
+   throw e
+ case e: Exception =>
logger.info("check udf function error: {}", e.getMessage)
}
plan
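
For context, a minimal sketch of how an optimizer rule like SparkUDFCheckRule can be attached to a session through Spark's standard extensions API. The registration shown here is an assumption for illustration (Linkis wires the rule up through its own engine plugin machinery), but the code-type property is the same one the rule reads above:

import org.apache.spark.sql.SparkSession
import org.apache.linkis.engineconn.computation.executor.conf.ComputationExecutorConf
import org.apache.linkis.engineplugin.spark.extension.SparkUDFCheckRule

object UdfCheckDemo {
  def main(args: Array[String]): Unit = {
    // Mark the running code as non-SQL so the rule enforces the UDF restriction.
    System.setProperty(ComputationExecutorConf.CODE_TYPE, "scala")

    val spark = SparkSession
      .builder()
      .master("local[*]")
      .appName("udf-check-demo")
      // Run the check as an optimizer rule; Spark calls apply(plan) for each plan.
      .withExtensions(_.injectOptimizerRule(session => SparkUDFCheckRule(session)))
      .getOrCreate()

    // A plan that references the restricted UDF now fails with
    // RuleCheckFailedException (error 43050) instead of a generic RuntimeException.
    spark.sql("SELECT restricted_udf(value) FROM demo_table").collect() // hypothetical UDF and table
  }
}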
