• sqoop导入数据到hdfs初始化



    #!/bin/bash

    ##########################################
    # System Name : Big-data analytics platform        #
    # Author      : heguoxiu                           #
    # Description : sqoop import from RDBMS into HDFS  #
    ##########################################


    # Load shared DB connection settings; this file is expected to define
    # v_host, v_port, v_database, v_username and v_password used below.
    # NOTE(review): path had a stray double slash ("cdh//parse"); normalized
    # to match parse_path used later in this script — confirm on the host.
    source /qhapp/cdh/parse/opt/shell/public/public_db.cfg


    # Variable definitions
    #v_table="ec_sales_order_detail_0000"
    # Explicit column list spliced into the sqoop --query below.
    v_columns="pay_jnl_no ,order_no ,channel ,channel_date ,channel_type ,sys_code ,um_no ,um_name ,product_code ,trans_action ,merchant_id ,merchant_name ,store_id ,pay_path_no ,trans_date ,app_id ,open_id ,currency ,trans_amt ,order_info ,pay_type ,req_url ,pay_status ,status_code ,status_msg ,created_by ,date_created ,updated_by ,date_updated ,channel_jnl_no ,pay_direction ,trans_acct_date ,summary ,rev_status ,check_status ,pmt_cmp_no"

    # Column used for --split-by.
    # NOTE(review): an amount column is a poor split key (value skew gives
    # unbalanced mappers); a surrogate id would distribute better — confirm.
    v_key="trans_amt"


    ### Backfill window (fixed dates for this one-off initial load) ###
    year=$(date -d "-1 day 2017-03-03" +%Y)
    month=$(date -d "-1 day 2017-03-03" +%Y%m)
    day=$(date -d "-1 day 2017-03-03" +%Y%m%d)
    start_dt=$(date -d "+0 day 2018-01-01" +%Y-%m-%d)
    end_dt=$(date -d "+0 day 2018-08-09" +%Y-%m-%d)

    # Incremental column; the WHERE clause selects the half-open
    # interval [start_dt, end_dt) on it.
    inc_dt="date_created"
    where_condition="${inc_dt} >= '${start_dt}' AND ${inc_dt} < '${end_dt}'"


    # HDFS directory backing the hive target table, and the path holding
    # the follow-up parse SQL.
    hdfs_dir="/ods/ods_initdb/ods_initdb_ec_sales_pay_detail"
    parse_path="/qhapp/cdh/parse/opt/shell/import/import_init"

    # Clear any previous load before appending fresh data.
    # -rmr is deprecated; -rm -r -f also avoids a non-zero exit when the
    # directory is already empty.
    hadoop fs -rm -r -f "${hdfs_dir}"/*

    # Import each shard table into the same HDFS dir (--append merges them).
    # A plain for-loop replaces the original echo | while read | eval-awk
    # pipeline, which spawned three processes just to assign one variable.
    for v_table in \
        ec_sales_pay_detail_0012 \
        ec_sales_pay_detail_0013 \
        ec_sales_pay_detail_0014 \
        ec_sales_pay_detail_0015
    do
        echo "###开始导入${v_table}: $(date +"%Y-%m-%d %H:%M:%S")"

        # \$CONDITIONS must reach sqoop literally: sqoop substitutes its
        # split predicates there. Unescaped, the shell expands $CONDITIONS
        # to empty and the parallel import breaks.
        # NOTE(review): terminators restored to "\001"/"\n" — the blog
        # capture had stripped backslashes ("01", " "); confirm they match
        # the hive table's SerDe delimiters.
        sqoop import \
            --connect "${v_host}:${v_port}${v_database}" \
            --username "${v_username}" --password "${v_password}" \
            --query "select ${v_columns} from ${v_table} t WHERE \$CONDITIONS and ${where_condition} " \
            --null-string '\N' --null-non-string '\N' \
            --split-by "${v_key}" -m 4 \
            --fields-terminated-by "\001" --lines-terminated-by "\n" \
            --append --target-dir "${hdfs_dir}"

        echo "###导入结束${v_table}: $(date +"%Y-%m-%d %H:%M:%S")"
    done

    # Run the parse HQL over the freshly loaded data, passing the
    # incremental column name and the backfill window as hivevars.
    hive -hivevar inc_dt=${inc_dt} -hivevar v_fm_tm=${start_dt} -hivevar v_to_tm=${end_dt} -f "${parse_path}/ods_initdb_ec_sales_pay_detail.sql"

    # One-off import of a small Oracle dimension table for the ML job.
    # \$CONDITIONS must stay literal for sqoop even with -m 1 (free-form
    # --query requires it).
    # NOTE(review): credentials are hard-coded; move them into
    # public_db.cfg (or a credential store) like the main import above.
    # NOTE(review): line terminator restored to "\n" — the blog capture had
    # stripped the backslash (" "); confirm against the consumer.
    sqoop import \
        --connect jdbc:oracle:thin:@10.10.10.2:1521:orcl \
        --username iecbi --password 123456 \
        --query "select t.bi_value,t.catagory_id,t.bi_datetime from BI_TRADING_TRENDS_CATATORY t WHERE \$CONDITIONS " \
        --null-string '\N' --null-non-string '\N' \
        --split-by 'bi_value' -m 1 \
        --fields-terminated-by "," --lines-terminated-by "\n" \
        --delete-target-dir --target-dir /ML_DB/LINEAR_REGRESSION

  • 相关阅读:
    洛咕 P4474 王者之剑
    CF708D Incorrect Flow
    CF802C Heidi and Library (hard)
    CF434D Nanami's Power Plant
    洛咕3312 [SDOI2014]数表
    洛咕 P3704 [SDOI2017]数字表格
    洛咕 P3706 [SDOI2017]硬币游戏
    洛咕 P4491 [HAOI2018]染色
    Luogu4240 毒瘤之神的考验 莫比乌斯反演、根号分治
    UOJ269 清华集训2016 如何优雅地求和 下降幂多项式、NTT
  • 原文地址:https://www.cnblogs.com/heguoxiu/p/10120536.html
Copyright © 2020-2023  润新知