org.apache.spark.unsafe.types.UTF8String.toInt()I
这是我这边导入的依赖~
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>2.1.1</version>
</dependency>
如下是一个小demo,在执行的过程中报:Exception in thread "main" java.lang.NoSuchMethodError: org.apache.spark.unsafe.types.UTF8String.toInt()I
def main(args: Array[String]): Unit = {
    // Build a SparkSession wired to a remote Hive metastore so that
    // `enableHiveSupport` lets us query existing Hive tables directly.
    val session = SparkSession
      .builder()
      .appName("dd")
      .master("local[*]")
      .config("hive.metastore.uris", "thrift://test-dev-cdh-1:9083")
      .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .config("metastore.catalog.default", "hive")
      .enableHiveSupport()
      .getOrCreate()
    try {
      // Sanity checks: confirm metastore connectivity before querying data.
      session.sql("show databases").show()
      session.sql("use dev_analysis")
      session.sql("show tables").show()
      session.sql("select day from tsgl_terminal_exhaust4 limit 10").show()
    } finally {
      // Always stop the session: releases the SparkContext, executor
      // threads, and the UI port even if a query above throws.
      session.stop()
    }
  }
解决方案:去掉显式声明的 spark-core 依赖。根本原因是 spark-core(2.2.1)与 spark-sql(2.1.1)版本不一致:UTF8String.toInt 等内部 API 在两个版本间二进制不兼容,运行时就会抛出 NoSuchMethodError。去掉 spark-core 后,Maven 会通过 spark-sql 传递引入与之匹配的 2.1.1 版 spark-core;另一种等价做法是把两个依赖统一为同一版本号。
    生命不息,编码不止;人尚在人间,创新不可停,程序员共勉之~
 
                    
                
 
                
            
         浙公网安备 33010602011771号
浙公网安备 33010602011771号