[Solution] The error occurs because the registered Kryo serialization classes never take effect: the registrator configuration must be set on the SparkConf before the SparkContext is initialized. Example:
// Imports assume Spark 2.x. The registrator classes come from GeoMesa, SuperMap BDT
// and geotrellis; their package paths depend on the versions you use.
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

// Fully qualified class names of every Kryo registrator that must be active.
// Note: the geotrellis KryoRegistrator package name may differ between versions.
val registratorClass = Seq(
  classOf[GeoMesaSparkKryoRegistrator].getName,
  classOf[BDTKryoRegistrator].getName,
  classOf[KryoRegistrator].getName)

// Set the Kryo serializer and registrators on the SparkConf BEFORE the
// SparkContext/SparkSession is created, otherwise the registration is ignored.
// Values such as AppName, master and the spark_* variables come from the
// application's own configuration.
val conf: SparkConf = new SparkConf()
  .setAppName(AppName)
  .setMaster(master)
  .set("spark.executor.cores", spark_executor_cores)
  .set("spark.driver.memory", spark_driver_memory)
  .set("spark.cores.max", spark_cores_max)
  .set("spark.executor.memory", spark_executor_memory)
  .set("spark.driver.maxResultSize", spark_executor_maxResultSize)
  .set("spark.driver.host", register_host)
  .set("spark.driver.port", "6426")
  .set("spark.default.parallelism", "4")
  .set("spark.deploy.mode", "client")
  .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  .set("spark.kryoserializer.buffer.max", "256m")
  .set("spark.kryo.registrator", registratorClass.mkString(","))
  .setJars(List(spark_jarpath))

// Let SuperMap BDT apply its own serialization settings to the same conf.
com.supermap.bdt.base.Util.configure(conf)

// Only now create the session, so the Kryo settings are part of the context.
val spark = SparkSession.builder.config(conf).getOrCreate()
spark.sparkContext.setLogLevel("WARN")
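As a quick sanity check (a minimal sketch using the SparkSession built above, not part of the original fix), you can read the settings back from the running context to confirm the serializer and registrators were actually applied:

// Should print the KryoSerializer class and the comma-separated registrator list set above.
println(spark.sparkContext.getConf.get("spark.serializer"))
println(spark.sparkContext.getConf.get("spark.kryo.registrator"))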