Shell

2016-08-24 22:04
#!/bin/bash

source /etc/profile
source ~/.bashrc

# Resolve the directory this script lives in, then load the shared config.
export basepath=$(cd `dirname $0`; pwd)
source ${basepath}/cfg/css.cfg

# Connection settings now live in cfg/css.cfg; kept here for reference.
#username="root"
#password="root"
#jdbcUrl="jdbc:mysql://192.168.80.100:3306/"

# Record the start time in epoch seconds for the elapsed-time report at the end.
timers1=`date +%s`

#####################################################
################ Runs once every hour ################
#####################################################

# Yesterday's date (daily variant, kept for reference):
#etl_date=`date "+%Y-%m-%d" --date="-1 day"`
#etl_date=$1

# The previous hour of the current day, e.g. "2016-04-23 17".
etl_date=`date "+%Y-%m-%d %H" --date="-1 hour"`
#etl_date=`date "+%Y-%m-%d %H" --date="0 hour"`
#etl_date='2016-04-23 17'
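GNU date handles day rollover by itself: subtracting an hour at midnight also steps the date back, so the hourly job never needs special-casing. A quick illustrative check (the reference timestamps below are made-up examples, not from the post):

# Illustrative only: "1 hour ago" relative to a fixed reference time.
date "+%Y-%m-%d %H" --date="2016-04-24 00:30 1 hour ago"   # -> 2016-04-23 23
date "+%Y-%m-%d %H" --date="2016-04-23 18:05 1 hour ago"   # -> 2016-04-23 17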

echo "##########################################################" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

echo "######################计算$etl_date开始######################" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

echo "##########################################################" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

echo "*10*************`date +%Y-%m-%d\ %H:%M:%S`*****************" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

spark-sql --master yarn-client -d etl_date="${etl_date}" -f ${basepath}/f_ad_stastics.sql >> ${basepath}/log/"${etl_date}"_f_ad_stastics.log 2>&1
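spark-sql's -d (--define) exposes the value through Hive-style variable substitution, so f_ad_stastics.sql can reference it as ${etl_date}. A minimal sketch for sanity-checking the substitution (the inline query is an illustrative assumption; the post does not show the SQL file itself):

# Illustrative only: confirm the defined variable reaches the SQL side.
# The backslash keeps bash from expanding ${etl_date} before spark-sql sees it.
spark-sql --master yarn-client -d etl_date="2016-04-23 17" \
  -e "select '\${etl_date}' as etl_date"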

echo "***************`date +%Y-%m-%d\ %H:%M:%S`*******" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

echo "*11******************`date +%Y-%m-%d\ %H:%M:%S`*****************" >>  ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

columns="create_time,unit_id,activity_id,period,sponsor_id,exposure,click,pay_click,out_link_click,share_count,is_settlement"

sqoop export --connect ${jdbcUrl} --username $username --password ${password}  --table ad_stastics --columns $columns --export-dir ${hdfsDbPath}/tmp/f_ad_stastics --fields-terminated-by '\001' >> ${basepath}/log/"${etl_date}"_sqoop_f_ad_stastics.log 2>&1
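'\001' (Ctrl-A) is Hive's default field delimiter, which is why the export can consume the Spark output directly. If the export misbehaves, a quick illustrative check of the staged files (paths taken from the variables above):

# Illustrative only: list the export directory and eyeball the delimiter.
# cat -v renders the \001 separator visibly as ^A.
hadoop fs -ls ${hdfsDbPath}/tmp/f_ad_stastics
hadoop fs -cat ${hdfsDbPath}/tmp/f_ad_stastics/part-* | head -n 3 | cat -v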

beginDate13=`date "+%Y-%m-%d %H:%M:%S:"`

echo "*******************`date +%Y-%m-%d\ %H:%M:%S`*****************" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

# Compute total elapsed time in minutes.
timers2=`date +%s`
minu=$((($timers2-$timers1)/60))

echo "**************************************************************************************************" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1
echo "*********************** All hourly metrics computed; total elapsed time: ${minu} minutes ****************************************" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1
echo "**************************************************************************************************" >> ${basepath}/log/"${etl_date}"_main_z_run_day.log 2>&1

###############################################################################################################################################
exit
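One small caveat on the timing report: $(( ... / 60 )) is integer division, so a run that finishes in under a minute logs 0 minutes. An illustrative variant that keeps the remainder in seconds:

# Illustrative only: report minutes plus leftover seconds.
elapsed=$(($timers2 - $timers1))
echo "elapsed: $(($elapsed / 60))m $(($elapsed % 60))s"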

cfg/css.cfg

#!/bin/bash

bpath=$(cd `dirname $0`; pwd)

# MySQL connection settings (examples, kept commented out):
#jdbcUrl=jdbc:mysql://192.168.0.243:3306/ofbiz_result?characterEncoding=utf-8
#username=root
#password=123456

#hdfs://hadoop1:9000//user/hive/warehouse
# Hive warehouse location on HDFS
hdfsDbPath=hdfs://spark1:9000/data/

sqoopOutDir=${bpath}/../log/
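For run-hour.sh to work, the sourced config must actually define jdbcUrl, username and password, which are only commented out above. A minimal working sketch of cfg/css.cfg (every value is illustrative, lifted from the commented examples):

#!/bin/bash
# cfg/css.cfg -- sourced by run-hour.sh; all values below are illustrative.
bpath=$(cd `dirname $0`; pwd)
jdbcUrl="jdbc:mysql://192.168.0.243:3306/ofbiz_result?characterEncoding=utf-8"
username="root"
password="123456"
hdfsDbPath=hdfs://spark1:9000/data/
sqoopOutDir=${bpath}/../log/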

crontab

# Daily business metrics, at 01:00 every day
0 1 * * * . /etc/profile;/home/hadoop/xlzhou/run-all-day.sh >> /home/hadoop/xlzhou/log/crontab-z-run-all-day.log 2>&1 &

# Hourly traffic metrics, at minute 5 of every hour
5 */1 * * * . /etc/profile;/home/hadoop/xlzhou/run-hour.sh >> /home/hadoop/xlzhou/log/crontab-z-run-hour.log 2>&1 &
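The five cron fields are minute, hour, day-of-month, month, day-of-week, so "0 1 * * *" fires at 01:00 daily and "5 */1 * * *" at five past every hour. The standard way to install and verify these entries for the hadoop user:

crontab -e   # edit the current user's crontab and paste the entries above
crontab -l   # list installed entries to verify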