A simple Hive UDF within Apache Spark

import org.apache.spark.sql.functions.{col, udf}

// Define a plain Scala function that operates on the value of a column
val code = (param: String) => if (param == "myCode") 1 else 0
// Wrap that function in a UDF so it can be applied to DataFrame columns
val myUDF = udf(code)
// Apply the UDF to a column of an existing DataFrame, producing a new DataFrame with an additional column
val inputColumn = "existing_column_name"
val newDataframe = aDataframe.withColumn("new_column_name", myUDF(col(inputColumn)))
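For context, here is a minimal end-to-end sketch of the same idea. The local SparkSession, the sample data, and the column, view, and UDF names ("existing_column_name", "a_table", "my_udf") are illustrative assumptions, not part of the original example; it also shows registering the same function with spark.udf.register so it can be called from Spark SQL.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{col, udf}

object SimpleUdfExample {
  def main(args: Array[String]): Unit = {
    // Hypothetical local session, just for demonstration
    val spark = SparkSession.builder()
      .appName("simple-udf-example")
      .master("local[*]")
      .getOrCreate()
    import spark.implicits._

    // Illustrative sample data standing in for aDataframe
    val aDataframe = Seq("myCode", "otherCode", "myCode").toDF("existing_column_name")

    // Same function and UDF as above
    val code = (param: String) => if (param == "myCode") 1 else 0
    val myUDF = udf(code)

    // Apply the UDF as a new column on the DataFrame API side
    val newDataframe = aDataframe.withColumn("new_column_name", myUDF(col("existing_column_name")))
    newDataframe.show()

    // Optionally register the same function so it is usable from Spark SQL
    spark.udf.register("my_udf", code)
    aDataframe.createOrReplaceTempView("a_table")
    spark.sql("SELECT existing_column_name, my_udf(existing_column_name) AS new_column_name FROM a_table").show()

    spark.stop()
  }
}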