Sqoop 1.4.6
How to run
sqoop --options-file options1
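Each options file below puts one option or one value per line; Sqoop ignores blank lines and lines beginning with #. A hypothetical run, assuming the first block is saved as hdfstomysql.opt:
sqoop --options-file hdfstomysql.opt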
1.hdfstomysql
export
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123
--table
hdfstomysql
--columns
id,name,age
-m
1
--export-dir
hdfs://mycluster/hdfstomysql
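Note that sqoop export needs the target MySQL table to exist already. A minimal sketch for creating it, where the column types are assumptions (only the column names come from the options file):
mysql -h bigdatacloud -u root -p123 test -e "CREATE TABLE IF NOT EXISTS hdfstomysql (id INT, name VARCHAR(64), age INT)"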
2.mysqltohive
import
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123
--target-dir
/sqoop/THive
--delete-target-dir
--as-textfile
-m
1
--table
T_P
--columns
id,name,age
--hive-import
--hive-overwrite
--hive-table
T_hive
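With --hive-import and --hive-overwrite, Sqoop first writes the data under the --target-dir (/sqoop/THive) and then loads it into the Hive table T_hive, replacing any existing rows. A quick check after the job, assuming the hive CLI is available:
hive -e "SELECT * FROM T_hive LIMIT 10"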
3.mysqltohdfs
import
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123
--target-dir
/sqoop/T1
--delete-target-dir
--as-textfile
-m
1
--table
T_P
--columns
id,name,age
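With -m 1 and --as-textfile, the result is a single text part file under /sqoop/T1 (comma-delimited by default); to inspect it:
hdfs dfs -ls /sqoop/T1
hdfs dfs -cat /sqoop/T1/part-m-*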
1.mysqltohdfs (--where, null handling)
import
--connect
jdbc:mysql://bigdatahadoop:3306/test
--username
root
--password
123456
--table
t_person
--columns
id,name,age
--where
1=1
--target-dir
/sqoop/test1
--delete-target-dir
--as-textfile
-m
1
--null-string
''
--null-non-string
''
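The same job as a single inline command, handy for a quick test without an options file; --null-string '' and --null-non-string '' make SQL NULLs come out as empty strings in the text files, and --where 1=1 keeps every row:
sqoop import --connect jdbc:mysql://bigdatahadoop:3306/test --username root --password 123456 --table t_person --columns id,name,age --where "1=1" --target-dir /sqoop/test1 --delete-target-dir --as-textfile -m 1 --null-string '' --null-non-string ''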
2.mysqltohdfs (free-form --query)
import
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123456
--query
'select p.id,name,age,c.card_id,date_format(c.create_date,'%Y-%m-%d') as c_date from t_person p join t_id_card c on p.id=c.p_id where p.age>17 and $CONDITIONS'
--target-dir
/sqoop/test2
--delete-target-dir
--as-textfile
-m
1
--null-string
''
--null-non-string
''
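A free-form --query must keep the literal $CONDITIONS token, which Sqoop replaces with its split predicate at run time; with -m 1 there is only one split. To use more than one mapper, --split-by must also be given, e.g. this hypothetical fragment in place of the -m / 1 lines above:
-m
2
--split-by
p.id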
3.mysqltohive (--query, partitioned)
import
--connect
jdbc:mysql://bigdataspark:3306/test
--username
root
--password
123456
--query
'select p.id,name,age,c.card_id,date_format(c.create_date,'%Y-%m-%d') as c_date from t_person p join t_id_card c on p.id=c.p_id where p.age>17 and $CONDITIONS'
--target-dir
/sqoop/test3
--delete-target-dir
--as-textfile
-m
1
--null-string
''
--null-non-string
''
--hive-import
--hive-overwrite
--create-hive-table
--hive-table
t_person_card
--hive-partition-key
day
--hive-partition-value
'2016-07-03'
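--hive-partition-key and --hive-partition-value load the query result into the single static partition day='2016-07-03' of t_person_card, and --create-hive-table makes the job fail if that table already exists. A check afterwards, assuming the hive CLI:
hive -e "SELECT * FROM t_person_card WHERE day='2016-07-03' LIMIT 10"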
4.hdfstomysql (export)
export
--connect
jdbc:mysql://bigdatacloud:3306/test
--username
root
--password
123456
--table
t_person
--columns
id,name,age
-m
1
--export-dir
hdfs://mycluster/test
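As with the first export, the table t_person must already exist in MySQL with columns id, name and age, and the files under hdfs://mycluster/test must use the delimiters they were written with (comma-delimited text by default). A quick row-count check after the job:
mysql -h bigdatacloud -u root -p123456 test -e "SELECT COUNT(*) FROM t_person"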