• springboot集成sqoop


    sqoop我就不在这里给大家介绍了,能来到这里应该都知道sqoop了。
    目前我写了一个demo,演示springboot操作sqoop。废话不多说,直入主题。

    pom文件
    <?xml version="1.0" encoding="UTF-8"?>
    <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
             xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
        <modelVersion>4.0.0</modelVersion>
        <parent>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-parent</artifactId>
            <version>2.2.1.RELEASE</version>
            <relativePath/> <!-- lookup parent from repository -->
        </parent>
        <groupId>com.example</groupId>
        <artifactId>demo</artifactId>
        <version>0.0.1-SNAPSHOT</version>
        <name>demo</name>
        <description>Demo project for Spring Boot</description>

        <properties>
            <java.version>1.8</java.version>
        </properties>

        <dependencies>

            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-web</artifactId>
            </dependency>

            <dependency>
                <groupId>org.springframework.boot</groupId>
                <artifactId>spring-boot-starter-test</artifactId>
                <scope>test</scope>
                <exclusions>
                    <exclusion>
                        <groupId>org.junit.vintage</groupId>
                        <artifactId>junit-vintage-engine</artifactId>
                    </exclusion>
                </exclusions>
            </dependency>

            <!-- Sqoop + JDBC driver -->
            <dependency>
                <groupId>mysql</groupId>
                <artifactId>mysql-connector-java</artifactId>
                <version>5.1.42</version>
            </dependency>
            <dependency>
                <groupId>org.apache.sqoop</groupId>
                <artifactId>sqoop</artifactId>
                <version>1.4.7</version>
            </dependency>

            <!-- NOTE: commons-lang3 was declared twice in the original pom; one copy removed. -->
            <dependency>
                <groupId>org.apache.commons</groupId>
                <artifactId>commons-lang3</artifactId>
                <version>3.0</version>
            </dependency>

            <!-- Hadoop client libraries Sqoop runs against -->
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-common</artifactId>
                <version>2.8.4</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-hdfs</artifactId>
                <version>2.8.4</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-core</artifactId>
                <version>2.8.4</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-common</artifactId>
                <version>2.8.4</version>
            </dependency>
            <!-- NOTE(review): this artifact is test-scoped, yet Sqoop submits MapReduce jobs
                 in-process at runtime — confirm whether it needs compile/runtime scope. -->
            <dependency>
                <groupId>org.apache.hadoop</groupId>
                <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
                <version>2.8.4</version>
                <scope>test</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.avro</groupId>
                <artifactId>avro-mapred</artifactId>
                <version>1.8.1</version>
            </dependency>
            <dependency>
                <groupId>org.apache.hive</groupId>
                <artifactId>hive-common</artifactId>
                <version>2.3.2</version>
            </dependency>
            <dependency>
                <groupId>org.apache.avro</groupId>
                <artifactId>avro</artifactId>
                <version>1.8.1</version>
            </dependency>

        </dependencies>

        <build>
            <plugins>
                <plugin>
                    <groupId>org.springframework.boot</groupId>
                    <artifactId>spring-boot-maven-plugin</artifactId>
                </plugin>
            </plugins>
        </build>

    </project>

     
    controller
    package com.example.demo.module.controller;

    import com.example.demo.module.bean.sqoopBean;
    import com.example.demo.module.service.sqoopService;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.web.bind.annotation.PostMapping;
    import org.springframework.web.bind.annotation.RequestMapping;
    import org.springframework.web.bind.annotation.ResponseBody;
    import org.springframework.web.bind.annotation.RestController;

    /**
     * REST endpoints that trigger Sqoop imports from MySQL into HDFS / HBase.
     * All job parameters (connection string, credentials, table names, parallelism)
     * arrive as plain request parameters and are forwarded to the service layer.
     */
    @RestController
    public class sqoopController {

        @Autowired
        private sqoopService ss;

        /** Simple liveness endpoint. */
        @RequestMapping("/hi")
        public String get() {
            return "你好";
        }

        /**
         * Imports a MySQL table into HDFS.
         * The returned bean carries the Sqoop exit code (0 = success, 1 = failure)
         * plus a completion timestamp.
         *
         * @param m         number of parallel map tasks (Sqoop "-m")
         * @param targetdir HDFS target directory for the imported data
         * @param putlocation HDFS service address (fs default name)
         */
        // @ResponseBody is implied by @RestController, so it was removed.
        @PostMapping("/mysql2hdfs")
        public sqoopBean sqoopTransform(String jdbc, String driver, String username, String password, String table, int m, String targetdir, String putlocation) throws Exception {
            return ss.db2db(jdbc, driver, username, password, table, m, targetdir, putlocation);
        }

        /**
         * Imports a MySQL table into an HBase table.
         *
         * @param columnFamily HBase column family to write into
         * @param rowkey       source column used as the HBase row key
         */
        @PostMapping("/mysql2hbase")
        public sqoopBean transformMysql2Hbase(String jdbc, String driver, String username, String password, String mysqlTable, String hbaseTableName, String columnFamily, String rowkey, int m) throws Exception {
            return ss.mysql2Hbase(jdbc, driver, username, password, mysqlTable, hbaseTableName, columnFamily, rowkey, m);
        }

    }

     
    bean
    package com.example.demo.module.bean;

    import java.sql.Timestamp;

    /**
     * Result holder for a Sqoop run: the tool's integer exit code and the
     * moment the run finished.
     */
    public class sqoopBean {

        // Exit code from Sqoop.runSqoop (0 = success per the controller's contract).
        private int i;
        // Completion timestamp of the job.
        private Timestamp ts;

        public int getI() {
            return i;
        }

        /** Non-standard setter: stores the code and echoes it back to the caller. */
        public int setI(int i) {
            this.i = i;
            return this.i;
        }

        public Timestamp getTs() {
            return ts;
        }

        /** Non-standard setter: stores the timestamp and echoes it back. */
        public Timestamp setTs(Timestamp ts) {
            this.ts = ts;
            return this.ts;
        }

        @Override
        public String toString() {
            StringBuilder text = new StringBuilder("sqoopBean{");
            text.append("i=").append(i).append(", ts=").append(ts).append('}');
            return text.toString();
        }
    }

     
    service
    package com.example.demo.module.service;

    import com.example.demo.module.bean.sqoopBean;

    public interface sqoopService {
    public sqoopBean db2db(String jdbc, String driver, String username, String password, String table, int m, String targetdir, String putlocation) throws Exception;
    //mysql到hbase
    public sqoopBean mysql2Hbase( String jdbc, String driver, String username, String password,String mysqlTable, String hbaseTableName, String columnFamily, String rowkey, int m) throws Exception;


    serviceImpl
    package com.example.demo.module.service.impl;

    import com.example.demo.module.bean.sqoopBean;
    import com.example.demo.module.service.sqoopService;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.sqoop.Sqoop;
    import org.apache.sqoop.tool.SqoopTool;
    import org.apache.sqoop.util.OptionsFileUtil;
    import org.springframework.stereotype.Service;

    import java.sql.Timestamp;
    import java.util.Date;

    @Service
    public class sqoopServiceImpl implements sqoopService {
    // @Autowired
    // private sqoopBean sqoopbean;

    @Override
    public sqoopBean db2db(String jdbc, String driver, String username, String password, String table, int m, String targetdir, String putlocation) throws Exception {

    String[] args = new String[] {
    "--connect",jdbc,
    "--driver",driver,
    "-username",username,
    "-password",password,
    "--table",table,
    "-m",String.valueOf(m),
    "--target-dir",targetdir,
    };

    sqoopBean sqoopBean = new sqoopBean();
    String[] expandArguments = OptionsFileUtil.expandArguments(args);
    SqoopTool tool = SqoopTool.getTool("import");
    Configuration conf = new Configuration();
    conf.set("fs.default.name", putlocation);//设置HDFS服务地址
    Configuration loadPlugins = SqoopTool.loadPlugins(conf);
    Sqoop sqoop = new Sqoop((com.cloudera.sqoop.tool.SqoopTool) tool, loadPlugins);
    int i = sqoopBean.setI(Sqoop.runSqoop(sqoop,expandArguments));
    Timestamp ts = sqoopBean.setTs(new Timestamp(new Date().getTime()));
    // map.put("result",Sqoop.runSqoop(sqoop,expandArguments)); map.put("time",new Timestamp(new Date().getTime()));

    return sqoopBean;


    }
    //mysql到hbase
    @Override
    public sqoopBean mysql2Hbase( String jdbc, String driver, String username, String password,String mysqlTable, String hbaseTableName, String columnFamily, String rowkey, int m) throws Exception {
    String[] args = new String[] {
    "--connect",jdbc,
    "--driver",driver,
    "-username",username,
    "-password",password,
    "--table",mysqlTable,
    "--hbase-table",hbaseTableName,
    "--column-family",columnFamily,
    "--hbase-create-table",
    "--hbase-row-key",rowkey,
    "-m",String.valueOf(m),
    };
    sqoopBean sqoopBean = new sqoopBean();
    String[] expandArguments = OptionsFileUtil.expandArguments(args);
    SqoopTool tool = SqoopTool.getTool("import");
    Configuration conf = new Configuration();
    Configuration loadPlugins = SqoopTool.loadPlugins(conf);
    Sqoop sqoop = new Sqoop((com.cloudera.sqoop.tool.SqoopTool) tool, loadPlugins);
    int i = sqoopBean.setI(Sqoop.runSqoop(sqoop,expandArguments));
    Timestamp ts = sqoopBean.setTs(new Timestamp(new Date().getTime()));
    return sqoopBean;
    }

    Application
    package com.example.demo;

    import org.springframework.boot.SpringApplication;
    import org.springframework.boot.autoconfigure.SpringBootApplication;
    import org.springframework.context.annotation.ComponentScan;

    @SpringBootApplication
    public class DemoApplication {

    public static void main(String[] args) {
    SpringApplication.run(DemoApplication.class, args);
    }

    测试
    我现在测试一下mysql数据迁移到hdfs
    下面用到一个软件postman,没有的话百度下载一个

    ————————————————
    版权声明:本文为CSDN博主「怎么全部重名了」的原创文章,遵循CC 4.0 BY-SA版权协议,转载请附上原文出处链接及本声明。
    原文链接:https://blog.csdn.net/qq_16234927/article/details/103182345

  • 相关阅读:
    Redis数据库概述
    分布式爬虫(一)------------------分布式爬虫概述
    Spark环境搭建(五)-----------Spark生态圈概述与Hadoop对比
    错误解决记录------------rhel安装Mysql软件包依赖 mariadb组件
    rhel 7安装Mysql
    Linux虚拟机搭建本地yum源
    rhel配置网络yum源
    Spark环境搭建(四)-----------数据仓库Hive环境搭建
    冲销会计凭证:FBRP与FB08的区别
    xk01创建供应商保存的时候,提示错误“科目800001已经存在”
  • 原文地址:https://www.cnblogs.com/javalinux/p/14846615.html
Copyright © 2020-2023  润新知