1. Switch to the hdfs user and go to its home directory:
su hdfs
cd ~
2. Export the order data for shop 76729050 from the order_di table to a comma-separated file (nvl supplies defaults for NULL values):
hive -e " set hive.cli.print.header=true;
USE saas; SELECT CONCAT_WS(',',
cast( nvl(order_id ,0) as string)
,cast( nvl(total_amount ,0.00) as string)
,cast( nvl(dinner_count ,0) as string)
,cast( nvl(latitude ,0.00) as string)
,cast( nvl(longitude ,0.00) as string)
,cast( nvl(has_invoice ,0) as string)
,cast( nvl(invoice_type ,0) as string)
,cast( nvl(invoice_title ,'') as string)
,cast( nvl(invoice_taxpayer ,'') as string)
,cast( nvl(description ,'') as string)
,cast( nvl(place_time,cast('1000-01-01 00:00:00' as timestamp)) as string)
,cast( nvl(delivery_time,cast('1000-01-01 00:00:00' as timestamp)) as string)
,cast( nvl(order_type ,'') as string)
,cast( nvl(status ,'') as string)
,cast( nvl(create_time,cast('1000-01-01 00:00:00' as timestamp)) as string)
,cast( nvl(dt ,0) as string)
,cast( nvl(data_source ,'') as string))
from order_di
where shop_code = '76729050' ; " | sed 's/[\t]/,/g' > one_shop.csv
3. Compress the CSV:
zip -r one_shop.zip one_shop.csv
4. Download the archive to the local machine:
sz one_shop.zip
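sz depends on the lrzsz package and a ZMODEM-capable terminal; if that is not available, copying the archive with scp works as well (the gateway host name below is a placeholder):
scp hdfs@<gateway-host>:one_shop.zip .   # run from the local machine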
5. When opening the CSV (e.g. in Excel), note that long numeric values such as order_id may be converted to scientific notation.
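One possible workaround (an assumption, not part of the original flow; the column position and output file name below are illustrative) is to wrap the first column as ="..." so spreadsheet tools keep the long digits as text:
awk -F',' 'BEGIN{OFS=","} NR==1{print; next} {$1="=\"" $1 "\""; print}' one_shop.csv > one_shop_excel.csv   # header line passed through unchanged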