For importing from an Oracle database into Hive:
sqoop import -D mapreduce.job.queuename=abcdef \
--connect jdbc:oracle:thin:@dbdddd-sc8:1521/dbservice1 \
--username userrrrr \
--password cdwewerewrewrwerV \
--split-by 'modifyts' \
--hive-import \
--hive-table 'abd.det_landing' \
--delete-target-dir \
--target-dir 'hdfs://bigredns/apps/hive/warehouse/abd.db/det_landing' \
--query "select header_key, document_type, order_date, modifyts, status, order_no, shipping_service, item_id, item_short_description, fulfillment_type, prime_line_no, extn_guest_sel_ship_opt, unit_price, line_total, status_date, status_quantity, scac, carrier_service_code, aged_order, req_ship_date, must_ship_date, ship_advice_no, shipnode_type, shipnode_name, node_description, status, status_desc, expected_date, actual_date, city, line_key, order_release_status_key, reason_code, wcs_guest_id, btch_ld_d, two_days_shipping_f from mgr.ord_det_vb12 where trunc(btch_ld_d) = trunc(sysdate - 1) and \$CONDITIONS" \
-m 30 \
--fetch-size 10000 \
--fields-terminated-by '\001' \
--lines-terminated-by '\n'
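To sanity-check the load, compare row counts on both sides. This is a sketch only: the sqoop eval call reuses the same connection details as the import above, while the beeline URL (hiveserver2-host:10000) is a placeholder for the cluster's actual HiveServer2 endpoint. If the Hive table accumulates multiple days of data, restrict the Hive count by btch_ld_d as well.

sqoop eval -D mapreduce.job.queuename=abcdef \
--connect jdbc:oracle:thin:@dbdddd-sc8:1521/dbservice1 \
--username userrrrr \
--password cdwewerewrewrwerV \
--query "select count(*) from mgr.ord_det_vb12 where trunc(btch_ld_d) = trunc(sysdate - 1)"

beeline -u "jdbc:hive2://hiveserver2-host:10000/default" \
-e "select count(*) from abd.det_landing"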
For exporting from HDFS to an Oracle database:
sqoop export -D mapreduce.job.queuename=abcdef -D mapreduce.map.memory.mb=2048 \
--connect jdbc:oracle:thin:@db-sc8:1521/dbservice1 \
--username user \
--password jjnnsnnnnpassword \
--num-mappers 10 \
--input-fields-terminated-by ";" \
--table MGR.ORD_MTH \
--export-dir /common/abcd/def_Output/method*
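sqoop export is not atomic: if one mapper fails midway, the target table is left with a partial load. One safeguard is to export into a staging table that Sqoop then moves into the target in a single transaction. A sketch, assuming a staging table MGR.ORD_MTH_STG (hypothetical name) has been created with the same structure as MGR.ORD_MTH; note that --staging-table cannot be combined with --direct.

sqoop export -D mapreduce.job.queuename=abcdef -D mapreduce.map.memory.mb=2048 \
--connect jdbc:oracle:thin:@db-sc8:1521/dbservice1 \
--username user \
--password jjnnsnnnnpassword \
--num-mappers 10 \
--input-fields-terminated-by ";" \
--table MGR.ORD_MTH \
--staging-table MGR.ORD_MTH_STG \
--clear-staging-table \
--export-dir /common/abcd/def_Output/method*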