
Converting between RDDs, DataFrames, and SQLContext in Spark

Let's go straight to the code. The example below reads a comma-separated log file into an RDD, converts it to a DataFrame, runs a SQL query against a temporary table, and converts the result back to an RDD.

import org.apache.spark.sql.{DataFrame, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

/**
 * I like writing code.
 * Created by wangtuntun on 16-5-7.
 */
object clean {

  def main(args: Array[String]) {

    // Set up the environment
    val conf = new SparkConf().setAppName("tianchi").setMaster("local")
    val sc = new SparkContext(conf)
    val sqc = new SQLContext(sc)

    // Define a class for the records. Note that it is never actually used below;
    // for the reflection-based rdd.toDF() conversion it would have to be declared
    // outside main, because Scala cannot derive a TypeTag for a method-local case class.
    case class user_pay_class(shop_id: String, user_id: String, DS: String)

    val user_pay_raw = sc.textFile("/home/wangtuntun/user_pay.txt")
    val user_pay_split = user_pay_raw.map(_.split(","))
    val user_transform = user_pay_split.map { x => // transform each record
      val userid = x(0)
      val shop_id = x(1)
      val ts = x(2) // timestamp: date and time separated by a space
      val ts_split = ts.split(" ")
      val year_month_day = ts_split(0).split("-")
      val year = year_month_day(0)
      val month = year_month_day(1)
      val day = year_month_day(2)
      // (shop_id, userid, year, month, day)
      (shop_id, userid, ts_split(0))
    }
    val df = sqc.createDataFrame(user_transform) // build a DataFrame from the RDD of tuples
    val df_named_columns = df.toDF("shop_id", "userid", "DS") // give each column of the df a name
    df_named_columns.registerTempTable("user_pay_table") // register a temporary table
    val sql = "select shop_id,count(userid),DS from user_pay_table group by shop_id,DS order by shop_id desc,DS"
    val rs: DataFrame = sqc.sql(sql)
    rs.foreach(x => println(x))
    // user_transform.saveAsTextFile("/home/wangtuntun/test_file4.txt")
    val rs_rdd = rs.map(x => (x(0), x(1), x(2))) // convert rs back to an RDD
    rs_rdd.saveAsTextFile("/home/wangtuntun/test_file5.txt")
    sc.stop()

  }

}
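
For completeness, here is a minimal sketch of the more common reflection-based route, assuming the same Spark 1.x API as above (the case class, object, and variable names are illustrative, not part of the original example): an RDD of a top-level case class can become a DataFrame with toDF() once the SQLContext implicits are imported, and any DataFrame exposes its underlying RDD[Row] via .rdd.

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SQLContext}
import org.apache.spark.{SparkConf, SparkContext}

// The case class must be top-level (not inside main) so a TypeTag can be derived.
case class UserPay(shop_id: String, user_id: String, DS: String)

object ConversionSketch {
  def main(args: Array[String]) {
    val sc = new SparkContext(new SparkConf().setAppName("sketch").setMaster("local"))
    val sqc = new SQLContext(sc)
    import sqc.implicits._ // brings rdd.toDF() into scope

    // RDD -> DataFrame: columns take their names from the case class fields
    val rdd: RDD[UserPay] = sc.parallelize(Seq(UserPay("s1", "u1", "2016-05-07")))
    val df: DataFrame = rdd.toDF()

    // DataFrame -> RDD: every DataFrame carries its underlying RDD[Row]
    val rows: RDD[Row] = df.rdd
    val back: RDD[UserPay] =
      rows.map(r => UserPay(r.getString(0), r.getString(1), r.getString(2)))

    // DataFrame <-> SQLContext: register a temp table and query it, as above
    df.registerTempTable("user_pay")
    val counts: DataFrame = sqc.sql("select shop_id, count(user_id) from user_pay group by shop_id")
    counts.foreach(row => println(row))

    sc.stop()
  }
}

Note the trade-off between the two directions: .rdd hands back untyped Row objects, so field access is positional (r.getString(0)), while the toDF() route starts from typed records and names the columns after the case class fields.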