Sqoop Basic Operations

Import

Import from MySQL into Hive

sqoop import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--table book_info \
--columns 'book_id,book_name' \
--where 'book_id < 20' \
--fields-terminated-by '\t' \
--delete-target-dir \
--num-mappers 1 \
--direct \
--hive-import \
--hive-database default \
--hive-table book_info
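
A quick sanity check after the import (a sketch, assuming the hive CLI is available on this node); with the --where filter above, max(book_id) should stay below 20:

hive -e "select count(*), min(book_id), max(book_id) from default.book_info;"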

A free-form query can also be used instead of --table:

sqoop import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--query 'select book_id,book_name from book_info where $CONDITIONS' \
--target-dir /user/hive/book_info \
--fields-terminated-by '\t' \
--delete-target-dir \
--num-mappers 1 \
--direct \
--hive-import \
--hive-database default \
--hive-table book_info
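
To run a free-form query with more than one mapper, Sqoop also needs a --split-by column to divide the work. A sketch of the parallel variant (4 mappers and book_id as the split column are illustrative choices; --direct is left out here):

# --split-by and --num-mappers 4 are illustrative choices
sqoop import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--query 'select book_id,book_name from book_info where $CONDITIONS' \
--split-by book_id \
--target-dir /user/hive/book_info \
--fields-terminated-by '\t' \
--delete-target-dir \
--num-mappers 4 \
--hive-import \
--hive-database default \
--hive-table book_info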

Incremental import: append mode (auto-increment ID)

sqoop import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--table book_info \
--columns 'book_id,book_name' \
--fields-terminated-by '\t' \
--num-mappers 1 \
--direct \
--hive-import \
--hive-database default \
--hive-table book_info \
--incremental append \
--check-column book_id \
--last-value 20
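
Instead of tracking --last-value by hand, the same import can be saved as a Sqoop job; the metastore then records the last imported value after every run. A sketch, with inc_book_info as an arbitrary job name:

# job name inc_book_info is arbitrary
sqoop job --create inc_book_info \
-- import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--table book_info \
--columns 'book_id,book_name' \
--fields-terminated-by '\t' \
--num-mappers 1 \
--direct \
--hive-import \
--hive-database default \
--hive-table book_info \
--incremental append \
--check-column book_id \
--last-value 0

# each run picks up where the previous one stopped
sqoop job --exec inc_book_info

By default a saved job does not store the database password, so --exec prompts for it; a --password-file can be supplied instead.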

Incremental import: lastmodified mode (timestamp column)

sqoop import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--table book_info \
--columns 'book_id,publish_date' \
--fields-terminated-by '\t' \
--num-mappers 1 \
--direct \
--hive-import \
--hive-database default \
--hive-table book_info \
--incremental lastmodified \
--check-column publish_date \
--last-value '2018-07-10 19:53:40'
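
A lastmodified import into a directory that already contains data expects either --append or --merge-key to decide how re-imported rows are handled, and some Sqoop releases reject --incremental lastmodified combined with --hive-import. A sketch of the plain-HDFS variant with a merge key (the target directory and book_id as the merge key are assumptions):

# target dir and merge key are assumptions for illustration
sqoop import \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--table book_info \
--columns 'book_id,publish_date' \
--fields-terminated-by '\t' \
--num-mappers 1 \
--target-dir /user/hive/warehouse/book_info \
--incremental lastmodified \
--check-column publish_date \
--merge-key book_id \
--last-value '2018-07-10 19:53:40'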

Export

Export to MySQL

sqoop export \
--connect jdbc:mysql://hadoop1:3306/test \
--driver com.mysql.jdbc.Driver \
--username root \
--password 123456 \
--table recruitment \
--fields-terminated-by '\001' \
--export-dir /user/hive/warehouse/work.db/job_tmp \
--direct
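
A quick way to confirm the result on the MySQL side is sqoop eval (a sketch; the count query is only illustrative):

sqoop eval \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--query 'select count(*) from recruitment'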

Update MySQL columns (update-key export)

sqoop export \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--driver com.mysql.jdbc.Driver \
--columns 'id,salary' \
--table work_info \
--fields-terminated-by '\t' \
--export-dir /user/hive/warehouse/work.db/job_info_textfile \
--update-key 'id' \
--direct
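
--update-key alone only updates rows that already exist in MySQL; rows missing from the table are skipped. Adding --update-mode allowinsert turns the export into an upsert. A sketch without --direct and --driver, so that Sqoop picks its MySQL-specific connector (the generic JDBC path typically does not support allowinsert):

# --update-mode allowinsert also inserts rows that do not match --update-key
sqoop export \
--connect jdbc:mysql://hadoop1:3306/test \
--username root \
--password 123456 \
--columns 'id,salary' \
--table work_info \
--fields-terminated-by '\t' \
--export-dir /user/hive/warehouse/work.db/job_info_textfile \
--update-key 'id' \
--update-mode allowinsert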