• 搭建sparksql的hive测试环境


    sbt依赖

    // sbt build definition for the Spark SQL + Hive smoke-test project.
    name := "Pi"
    version := "1.0"
    scalaVersion := "2.10.6"


    libraryDependencies ++= Seq(
      "org.apache.spark" %% "spark-core" % "1.5.2",
      // Use %% so the Scala binary suffix (_2.10) is derived from scalaVersion
      // instead of being hard-coded — consistent with spark-core above and
      // safe if scalaVersion is ever bumped.
      "org.apache.spark" %% "spark-hive" % "1.5.2",
      "joda-time" % "joda-time" % "2.9.2"
    )

    // NOTE(review): the oschina mirror has been retired for years; if resolution
    // fails, drop this resolver and rely on Maven Central (sbt's default).
    resolvers += "OS China" at "http://maven.oschina.net/content/groups/public/"
    import org.apache.spark._
    import org.apache.spark.sql.SQLContext
    import org.apache.spark.sql.hive.HiveContext
    
    
    /**
      * Minimal smoke test for Spark SQL's Hive integration: creates a local
      * [[HiveContext]], runs `show tables`, and prints each resulting row.
      *
      * Created by code-pc on 16/3/14.
      */
    object Pi {

      def main(args: Array[String]): Unit = {

        // Local mode with 5 worker threads; app name is arbitrary.
        val conf = new SparkConf().setMaster("local[5]").setAppName("AndrzejApp")
        val sc = new SparkContext(conf)

        try {
          val hqlc = new HiveContext(sc)
          // Lists the tables Hive knows about (uses a local metastore if no
          // hive-site.xml is on the classpath).
          val st = hqlc.sql("show tables")
          println("hello")
          st.collect().foreach(println)
        } finally {
          // Always release the local Spark context, even if the query fails —
          // the original leaked it on any exception.
          sc.stop()
        }
      }
    }

    （此处原文附有程序运行结果的截图，图片链接已失效）

  • 相关阅读:
    ubuntu 16.04 安装显卡驱动,再安装cuda
    8. golang 基本类型转换
    7.golang的字符串 string
    5. 变量定义
    4. 代码规范
    3.golang 的注释
    1.windows server 201x
    exec 命令
    powershell
    1.Dockerfile
  • 原文地址:https://www.cnblogs.com/ggzone/p/10121128.html
Copyright © 2020-2023  润新知