Shell
2016-08-24 22:04
816 查看
#!/bin/bash
# Hourly ETL driver:
#   1. runs the ad-statistics aggregation (f_ad_stastics.sql) on Spark SQL
#      for the previous clock hour;
#   2. exports the aggregated rows from HDFS into the MySQL table
#      `ad_stastics` via Sqoop.
# Connection settings (jdbcUrl, username, password, hdfsDbPath) are read
# from cfg/css.cfg next to this script.
source /etc/profile
source ~/.bashrc
# Absolute directory of this script, so relative paths work under cron.
export basepath=$(cd "$(dirname "$0")" && pwd)
source "${basepath}/cfg/css.cfg"
#username="root"
#password="root"
#jdbcUrl="jdbc:mysql://192.168.80.100:3306/"
timers1=$(date +%s)
#####################################################
############### runs once per hour ##################
#####################################################
# yesterday's date (daily variant, kept for reference):
#etl_date=`date "+%Y-%m-%d" --date="-1 day"`
#etl_date=$1
# Previous hour of today, e.g. "2016-04-23 17".  NOTE: the value contains
# a space, so every use below must stay double-quoted.
etl_date=$(date "+%Y-%m-%d %H" --date="-1 hour")
#etl_date=`date "+%Y-%m-%d %H" --date="0 hour"`
#etl_date='2016-04-23 17'
# Main progress log for this run (one file per ETL hour).
mainlog="${basepath}/log/${etl_date}_main_z_run_day.log"
echo "##########################################################" >> "${mainlog}" 2>&1
echo "######################计算$etl_date开始######################" >> "${mainlog}" 2>&1
echo "##########################################################" >> "${mainlog}" 2>&1
echo "*10*************$(date +%Y-%m-%d\ %H:%M:%S)*****************" >> "${mainlog}" 2>&1
# Step 1: hourly aggregation; etl_date is handed to the SQL as a variable.
spark-sql --master yarn-client -d etl_date="${etl_date}" -f "${basepath}/f_ad_stastics.sql" >> "${basepath}/log/${etl_date}_f_ad_stastics.log" 2>&1
echo "***************$(date +%Y-%m-%d\ %H:%M:%S)*******" >> "${mainlog}" 2>&1
echo "*11******************$(date +%Y-%m-%d\ %H:%M:%S)*****************" >> "${mainlog}" 2>&1
# Step 2: push the aggregated rows into MySQL.  The field delimiter \001
# is Hive's default and must match the files produced by the SQL job.
columns="create_time,unit_id,activity_id,period,sponsor_id,exposure,click,pay_click,out_link_click,share_count,is_settlement"
sqoop export --connect "${jdbcUrl}" --username "${username}" --password "${password}" --table ad_stastics --columns "${columns}" --export-dir "${hdfsDbPath}/tmp/f_ad_stastics" --fields-terminated-by '\001' >> "${basepath}/log/${etl_date}_sqoop_f_ad_stastics.log" 2>&1
# BUGFIX: the original repeated the next two lines verbatim (copy-paste)
# and contained a stray "cfg/css.cfg" line that was *executed* as a
# command, failing on every run; both removed.
beginDate13=$(date "+%Y-%m-%d %H:%M:%S:")
echo "*******************$(date +%Y-%m-%d\ %H:%M:%S)*****************" >> "${mainlog}" 2>&1
timers2=$(date +%s)
minu=$(( (timers2 - timers1) / 60 ))
echo "**************************************************************************************************" >> "${mainlog}" 2>&1
echo "***********************所有小时计算的指标计算完毕,共耗时${minu}分钟****************************************" >> "${mainlog}" 2>&1
echo "**************************************************************************************************" >> "${mainlog}" 2>&1
###############################################################################################################################################
exit
#!/bin/bash
# cfg/css.cfg — connection settings sourced by the run-* driver scripts.
# NOTE(review): jdbcUrl/username/password are commented out below, yet the
# scripts that source this file reference them — presumably a deployed
# copy defines live values; confirm before running.
bpath=$(cd `dirname $0`; pwd)
#jdbcUrl=jdbc:mysql://192.168.0.243:3306/ofbiz_result?characterEncoding=utf-8
#username=root
#password=123456
#hdfs://hadoop1:9000//user/hive/warehouse
# Hive warehouse root on HDFS (read by the Sqoop export step)
hdfsDbPath=hdfs://spark1:9000/data/
# Directory for Sqoop log output, relative to this config's location
sqoopOutDir=${bpath}/../log/
crontab
# Daily business metrics: run-all-day.sh at 01:00 every day.
0 1 * * * . /etc/profile;/home/hadoop/xlzhou/run-all-day.sh >> /home/hadoop/xlzhou/log/crontab-z-run-all-day.log 2>&1 &
# Hourly traffic metrics: run-hour.sh at minute 5 of every hour.
5 */1 * * * . /etc/profile;/home/hadoop/xlzhou/run-hour.sh >> /home/hadoop/xlzhou/log/crontab-z-run-hour.log 2>&1 &
# Hourly ETL driver (duplicate copy in this scrape):
#   1. runs the ad-statistics aggregation (f_ad_stastics.sql) on Spark SQL
#      for the previous clock hour;
#   2. exports the aggregated rows from HDFS into the MySQL table
#      `ad_stastics` via Sqoop.
# Connection settings (jdbcUrl, username, password, hdfsDbPath) are read
# from cfg/css.cfg next to this script.
source /etc/profile
source ~/.bashrc
# Absolute directory of this script, so relative paths work under cron.
export basepath=$(cd "$(dirname "$0")" && pwd)
source "${basepath}/cfg/css.cfg"
#username="root"
#password="root"
#jdbcUrl="jdbc:mysql://192.168.80.100:3306/"
timers1=$(date +%s)
#####################################################
############### runs once per hour ##################
#####################################################
# yesterday's date (daily variant, kept for reference):
#etl_date=`date "+%Y-%m-%d" --date="-1 day"`
#etl_date=$1
# Previous hour of today, e.g. "2016-04-23 17".  NOTE: the value contains
# a space, so every use below must stay double-quoted.
etl_date=$(date "+%Y-%m-%d %H" --date="-1 hour")
#etl_date=`date "+%Y-%m-%d %H" --date="0 hour"`
#etl_date='2016-04-23 17'
# Main progress log for this run (one file per ETL hour).
mainlog="${basepath}/log/${etl_date}_main_z_run_day.log"
echo "##########################################################" >> "${mainlog}" 2>&1
echo "######################计算$etl_date开始######################" >> "${mainlog}" 2>&1
echo "##########################################################" >> "${mainlog}" 2>&1
echo "*10*************$(date +%Y-%m-%d\ %H:%M:%S)*****************" >> "${mainlog}" 2>&1
# Step 1: hourly aggregation; etl_date is handed to the SQL as a variable.
spark-sql --master yarn-client -d etl_date="${etl_date}" -f "${basepath}/f_ad_stastics.sql" >> "${basepath}/log/${etl_date}_f_ad_stastics.log" 2>&1
echo "***************$(date +%Y-%m-%d\ %H:%M:%S)*******" >> "${mainlog}" 2>&1
echo "*11******************$(date +%Y-%m-%d\ %H:%M:%S)*****************" >> "${mainlog}" 2>&1
# Step 2: push the aggregated rows into MySQL.  The field delimiter \001
# is Hive's default and must match the files produced by the SQL job.
columns="create_time,unit_id,activity_id,period,sponsor_id,exposure,click,pay_click,out_link_click,share_count,is_settlement"
sqoop export --connect "${jdbcUrl}" --username "${username}" --password "${password}" --table ad_stastics --columns "${columns}" --export-dir "${hdfsDbPath}/tmp/f_ad_stastics" --fields-terminated-by '\001' >> "${basepath}/log/${etl_date}_sqoop_f_ad_stastics.log" 2>&1
# BUGFIX: the original repeated the next two lines verbatim (copy-paste)
# and contained a stray "cfg/css.cfg" line that was *executed* as a
# command, failing on every run; both removed.
beginDate13=$(date "+%Y-%m-%d %H:%M:%S:")
echo "*******************$(date +%Y-%m-%d\ %H:%M:%S)*****************" >> "${mainlog}" 2>&1
timers2=$(date +%s)
minu=$(( (timers2 - timers1) / 60 ))
echo "**************************************************************************************************" >> "${mainlog}" 2>&1
echo "***********************所有小时计算的指标计算完毕,共耗时${minu}分钟****************************************" >> "${mainlog}" 2>&1
echo "**************************************************************************************************" >> "${mainlog}" 2>&1
###############################################################################################################################################
exit
#!/bin/bash
# cfg/css.cfg — connection settings sourced by the run-* driver scripts.
# NOTE(review): jdbcUrl/username/password are commented out below, yet the
# scripts that source this file reference them — presumably a deployed
# copy defines live values; confirm before running.
bpath=$(cd `dirname $0`; pwd)
#jdbcUrl=jdbc:mysql://192.168.0.243:3306/ofbiz_result?characterEncoding=utf-8
#username=root
#password=123456
#hdfs://hadoop1:9000//user/hive/warehouse
# Hive warehouse root on HDFS (read by the Sqoop export step)
hdfsDbPath=hdfs://spark1:9000/data/
# Directory for Sqoop log output, relative to this config's location
sqoopOutDir=${bpath}/../log/
crontab
# Daily business metrics: run-all-day.sh at 01:00 every day.
0 1 * * * . /etc/profile;/home/hadoop/xlzhou/run-all-day.sh >> /home/hadoop/xlzhou/log/crontab-z-run-all-day.log 2>&1 &
# Hourly traffic metrics: run-hour.sh at minute 5 of every hour.
5 */1 * * * . /etc/profile;/home/hadoop/xlzhou/run-hour.sh >> /home/hadoop/xlzhou/log/crontab-z-run-hour.log 2>&1 &
相关文章推荐
- 【读书笔记】:Shell和Service(CAB和SCSF编程)
- shell应用:数据求和
- 一个不错的shell 脚本教程 (国)
- 记录cu上shell比赛的第6题:输入日期,打印天数
- shell 获取系统时间
- 使用shell命令去除文件中的BOM
- Shell练习-统计出每个IP的访问量有多少?
- 初写Shell脚本错误记录
- Shell学习笔记(6)
- 【Shell】从ILSVRC_DET数据集中单独拿出某一类图片和注释文件
- shell整理(34)===date 命令的小脚本
- shell 字符串连接方法
- ORACLE自动备份并且自动FTP到备份机的SHELL脚本(http://doc.linuxpk.com/456.html)
- shell脚本执行方法及差异
- shell-012: 特殊符号
- 亿能测试大讲堂 - YY在线课程[ 测试人员需要掌握的Shell脚本编程 ]
- shell自动监控重启Tomcat脚本
- Storm 多语言支持之ShellBolt原理及改进
- SHELL入门教程(6)-环境
- shell 自动重启nginx php shell脚本