./sqoop export --connect jdbc:mysql://127.0.0.1:3306/test --username hive --password hive --table MYTEST2 --columns "name,fileurl" --hcatalog-database default --hcatalog-table mytest2
./sqoop export --connect jdbc:mysql://localhost:3306/test --username hive --password hive --table MYTEST2 --export-dir /user/hive/warehouse/mytest2 --input-fields-terminated-by '\001'   # '\001' assumes Hive's default field delimiter; use ' ' if the warehouse data is space-delimited
./sqoop export --connect jdbc:mysql://localhost:3306/test --username hive --password hive --table MYTEST2 --export-dir /tmp/hive/hadoop --input-fields-terminated-by ' '
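For reference, all three exports assume the target table MYTEST2 already exists in MySQL. A minimal sketch of that table, assuming both columns hold short strings (only the column names name and fileurl come from the commands above; the types and lengths are guesses):

-- hypothetical DDL for the MySQL export target; adjust types/lengths to your data
CREATE TABLE MYTEST2 (
  name    VARCHAR(255),
  fileurl VARCHAR(1024)
) DEFAULT CHARSET=utf8;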
1.
sqoop export --connect "jdbc:mysql://127.0.0.1:3306/test?useUnicode=true&characterEncoding=utf-8" --username hive --table MYTEST2 --password hive --export-dir /input --fields-terminated-by ' '
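To spot-check that the export actually reached MySQL, a quick query from the mysql client works (assuming the same hive/hive credentials and local server as above):

mysql -h 127.0.0.1 -u hive -phive test -e "SELECT * FROM MYTEST2 LIMIT 10;"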
2.
-- copy the MySQL table structure into Hive
sqoop create-hive-table --connect jdbc:mysql://localhost:3306/test --table MYTEST2 --username hive --password hive --hive-table mytest2
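After create-hive-table finishes, the generated schema can be checked from the Hive CLI; this verification step is not part of the original notes, just a sanity check:

hive -e "DESCRIBE mytest2;"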
3. Run the wordcount example bundled with Hadoop
hadoop jar hadoop-examples-1.2.1.jar wordcount /input /output
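wordcount expects /input to already exist in HDFS and /output to not exist yet. A sketch of the surrounding HDFS commands (words.txt is a placeholder local file, and part-r-00000 is the usual reducer output file name):

hadoop fs -mkdir /input
hadoop fs -put words.txt /input
hadoop fs -cat /output/part-r-00000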
4. Create a table in Hive
CREATE EXTERNAL TABLE IF NOT EXISTS MYTEST2 (
name string,
fileurl string
)
ROW FORMAT DELIMITED
FIELDS TERMINATED BY ' '
LINES TERMINATED BY '\n'
STORED AS TEXTFILE
LOCATION
'/input/test';
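Once files are present under /input/test, the external table can be queried directly from Hive, for example (the LIMIT is arbitrary):

SELECT name, fileurl FROM MYTEST2 LIMIT 10;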