Note 5

#!/bin/sh

# Locate the project home relative to this script and pull in the shared settings
# (common.sh is expected to define fsimage_binary_path, fsimage_txt_path,
# fsimage_org_hdfs_path, cluster and day, which are used below).
home=$(cd `dirname $0`; cd ..; pwd)
. ${home}/bin/common.sh

# The OIV tool loads the whole fsimage into memory, so give it a large heap (in MB).
export HADOOP_HEAPSIZE=20000

# Pick the fsimage binary that matches the target cluster and day.
fsimage_binary_name=`ls ${fsimage_binary_path} | grep ${cluster} | grep ${day}`
fsimage_binary_file=${fsimage_binary_path}/${fsimage_binary_name}

fsimage_txt_name=${fsimage_binary_name}.txt
fsimage_txt_file=${fsimage_txt_path}/${fsimage_txt_name}

# Convert the binary fsimage to tab-delimited text, one line per file/directory.
hdfs oiv -p Delimited -i ${fsimage_binary_file} -o ${fsimage_txt_file}

# Upload the text fsimage to HDFS, replacing any previous copy with the same name
# (the rm simply reports an error on the first run, when nothing exists yet).
hdfs dfs -mkdir -p ${fsimage_org_hdfs_path}
hdfs dfs -rm ${fsimage_org_hdfs_path}/${fsimage_txt_name}
hdfs dfs -put ${fsimage_txt_file} ${fsimage_org_hdfs_path}

# Re-register the Hive partition so the freshly uploaded file is visible to queries.
echo -e "==========> load to hive start ==========>"
hive -e "ALTER TABLE xunshan.dwd_fsimage_org DROP IF EXISTS PARTITION(day=${day},cluster='${cluster}');"
hive -e "ALTER TABLE xunshan.dwd_fsimage_org ADD PARTITION(day=${day},cluster='${cluster}') LOCATION '${fsimage_org_hdfs_path}';"
echo -e "==========> load to hive end ==========>"
