
Scala Advanced Practice with Source Code, Part 1: Arrays, Function Basics, Control Flow, Files, and Exceptions

2016-04-18 17:06
This Scala advanced practice series skips redundant introductory prose and goes straight to code, with comments, to build the language foundation needed for Spark. It works best as a short, focused sprint; plan on about a week.

The first few simple examples use a Scala IDE worksheet, which helps with understanding types and the like.

Arrays

package basic

object basics {
println("Welcome to the Scala worksheet")       //> Welcome to the Scala worksheet

//fixed-length Array (elements can still be updated in place, see s1(0) below)
val array = Array(1,2,3,4,5)                   //> array  : Array[Int] = Array(1, 2, 3, 4, 5)
for(elem <- array) println(elem)           //> 1
//| 2
//| 3
//| 4
//| 5
val nums = new Array[Int](10)                 //> nums  : Array[Int] = Array(0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
val a = new Array[String](10)                 //> a  : Array[String] = Array(null, null, null, null, null, null, null, null, n
//| ull, null)
val s1 = Array("Hello", "World")              //> s1  : Array[String] = Array(Hello, World)
s1(0) = "Goodbye"

//mutable, variable-length ArrayBuffer
import scala.collection.mutable.ArrayBuffer
val b = ArrayBuffer[Int]()                    //> b  : scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer()
b += 1                                        //> res0: basic.basics.b.type = ArrayBuffer(1)
b += (1, 2, 3, 5)                             //> res1: basic.basics.b.type = ArrayBuffer(1, 1, 2, 3, 5)
b ++= Array(8, 13, 21)                        //> res2: basic.basics.b.type = ArrayBuffer(1, 1, 2, 3, 5, 8, 13, 21)
b.trimEnd(5)
b.insert(2, 6)
b.insert(2, 7, 8, 9)
b.remove(2)                                   //> res3: Int = 7
b.remove(2, 3)
b.toArray                                     //> res4: Array[Int] = Array(1, 1, 2)

val c = Array(2, 3, 5, 7, 11)           //> c  : Array[Int] = Array(2, 3, 5, 7, 11)
val result = for (elem <- c) yield 2 * elem   //> result  : Array[Int] = Array(4, 6, 10, 14, 22)
for (elem <- c if elem % 2 == 0) yield 2 * elem
//> res5: Array[Int] = Array(4)
c.filter(_ % 2 == 0).map(2 * _)               //> res6: Array[Int] = Array(4)

Array(1, 7, 2, 9).sum                        //> res7: Int = 19
ArrayBuffer("Mary", "had", "a", "little", "lamb").max
//> res8: String = little

val d = ArrayBuffer(1, 7, 2, 9)               //> d  : scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(1, 7, 2, 9)
val bSorted = d.sorted                        //> bSorted  : scala.collection.mutable.ArrayBuffer[Int] = ArrayBuffer(1, 2, 7,
//| 9)

val e = Array(1, 7, 2, 9)                     //> e  : Array[Int] = Array(1, 7, 2, 9)
scala.util.Sorting.quickSort(e)

e.mkString(" and ")                           //> res9: String = 1 and 2 and 7 and 9
a.mkString("<", ",", ">")                     //> res10: String = <null,null,null,null,null,null,null,null,null,null>

//multi-dimensional arrays
val matrix = Array.ofDim[Double](3, 4)        //> matrix  : Array[Array[Double]] = Array(Array(0.0, 0.0, 0.0, 0.0), Array(0.0
//| , 0.0, 0.0, 0.0), Array(0.0, 0.0, 0.0, 0.0))
matrix(2)(1) = 42
val triangle = new Array[Array[Int]](10)      //> triangle  : Array[Array[Int]] = Array(null, null, null, null, null, null, n
//| ull, null, null, null)
for (i <- 0 until triangle.length)
triangle(i) = new Array[Int](i + 1)

//  Tuple
val triple = (100, "Scala", "Spark")          //> triple  : (Int, String, String) = (100,Scala,Spark)
println(triple._1)                            //> 100
println(triple._2)                            //> Scala

//map
val ages = Map("Rocky" -> 27, "Spark" -> 5)   //> ages  : scala.collection.immutable.Map[String,Int] = Map(Rocky -> 27, Spark
//|  -> 5)

for((k,v) <- ages){
println("Key is " + k + ",value is " + v)
//> Key is Rocky,value is 27
//| Key is Spark,value is 5
}

for((k,_) <- ages){ //placeholder
println("Key is " + k)    //> Key is Rocky
//| Key is Spark
}

val map = Map("book"->10,"gun"->18,"ipad"->1000)
//> map  : scala.collection.immutable.Map[String,Int] = Map(book -> 10, gun ->
//| 18, ipad -> 1000)
for((k,v) <- map) yield (k,v * 0.9)             //> res11: scala.collection.immutable.Map[String,Double] = Map(book -> 9.0, gun
//|  -> 16.2, ipad -> 900.0)

//mutable Map
val scores = scala.collection.mutable.Map("Scala" -> 7, "Hadoop" -> 8, "Spark" -> 10 )
//> scores  : scala.collection.mutable.Map[String,Int] = Map(Hadoop -> 8, Spark
//|  -> 10, Scala -> 7)
val hadoopScore = scores.getOrElse("Hadoop", 0) //> hadoopScore  : Int = 8
//getOrElse returns the supplied default when the key is missing
//+= and -= are only available on mutable maps, similar to the ArrayBuffer operators above
//(adding to an immutable map is sketched below)
scores += ("R" -> 9)                            //> res12: basic.basics.scores.type = Map(Hadoop -> 8, R -> 9, Spark -> 10, Sca
//| la -> 7)
scores -= "Hadoop"                              //> res13: basic.basics.scores.type = Map(R -> 9, Spark -> 10, Scala -> 7)
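//Not in the original worksheet: with the immutable ages map defined above, += would not
//compile on a val; the + operator instead returns a new map and leaves ages unchanged.
//agesWithNewEntry and the "Flink" entry are illustrative names.
val agesWithNewEntry = ages + ("Flink" -> 2)
//agesWithNewEntry: Map(Rocky -> 27, Spark -> 5, Flink -> 2)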

//immutable SortedMap, keys kept in sorted order
val sortedScore = scala.collection.immutable.SortedMap("Scala" -> 7, "Hadoop" -> 8, "Spark" -> 10 )
//> sortedScore  : scala.collection.immutable.SortedMap[String,Int] = Map(Hadoo
//| p -> 8, Scala -> 7, Spark -> 10)

val tuple =(1,2,3.14,"Rocky","Spark","Flink")   //> tuple  : (Int, Int, Double, String, String, String) = (1,2,3.14,Rocky,Spark
//| ,Flink)
val third = tuple._3                            //> third  : Double = 3.14
val (first,second,thirda,fourth,fifth,sixth) = tuple
//> first  : Int = 1
//| second  : Int = 2
//| thirda  : Double = 3.14
//| fourth  : String = Rocky
//| fifth  : String = Spark
//| sixth  : String = Flink
//partial extraction with placeholders
val (f, s, _, _, _,_) = tuple                   //> f  : Int = 1
//| s  : Int = 2

"Rocky Spark".partition(_.isUpper)              //> res14: (String, String) = (RS,ocky park)

//zip
val symbols = Array("[", "-", "]")              //> symbols  : Array[String] = Array([, -, ])
val counts = Array(2,5,2)                       //> counts  : Array[Int] = Array(2, 5, 2)
val pairs = symbols.zip(counts)                 //> pairs  : Array[(String, Int)] = Array(([,2), (-,5), (],2))
for ((x,y) <- pairs) print(x*y)               // prints [[-----]]

}


Function Basics and Advanced for Usage

package basic

import scala.io.Source

object forfunction {
println("Welcome to the Scala worksheet")       //> Welcome to the Scala worksheet

//assigning a function literal to a variable
var increase = (x: Int) => x + 1                //> increase  : Int => Int = <function1>
println(increase(10))           //> 11
increase = (x: Int) => x + 9999

val someNumbers = List(-11, -10, -5, 0, 5, 10)
//> someNumbers  : List[Int] = List(-11, -10, -5, 0, 5, 10)
someNumbers.foreach((x: Int) => print (x))
//> -11-10-50510
//the same filter + foreach, progressively simplified
someNumbers.filter((x: Int) => x > 0).foreach((x: Int) => print (x))
//> 510
someNumbers.filter((x) => x > 0).foreach((x: Int) => print (x))
//> 510
someNumbers.filter(x => x > 0).foreach((x: Int) => print (x))
//> 510
someNumbers.filter(_ > 0).foreach((x: Int) => print (x))
//> 510
val f = (_: Int) + (_: Int)     //> f  : (Int, Int) => Int = <function2>
println(f(5, 10))               //> 15

for(i <- 1 to 2; j <- 1 to 2) print((100*i + j) + "  ")
//> 101  102  201  202
for(i <- 1 to 2; j <- 1 to 2 if i != j) print((100*i + j) + "  ")
//> 102  201

def addA(x: Int) = x + 100                      //> addA: (x: Int)Int
val add = (x: Int) => x + 200                   //> add  : Int => Int = <function1>
println("The result from a function is : " + addA(2))
//> The result from a function is : 102
println("The result from a val is : " + add(2))
//> The result from a val is : 202
//recursive functions need an explicit return type; note this version is NOT tail-recursive
//(the multiplication happens after the recursive call returns), so deep recursion can still
//overflow the stack; a tail-recursive variant is sketched below
def fac(n: Int): Int = if (n <= 0) 1 else n * fac(n - 1)
//> fac: (n: Int)Int
println("The result from a fac is : " + fac(10))
//> The result from a fac is : 3628800
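//Sketch (not from the original worksheet): a genuinely tail-recursive factorial.
//The accumulator keeps the recursive call in tail position, and @tailrec makes the
//compiler verify that it can be compiled into a loop. facTail and loop are
//illustrative names; BigInt avoids Int overflow for larger n.
import scala.annotation.tailrec
def facTail(n: Int): BigInt = {
  @tailrec
  def loop(i: Int, acc: BigInt): BigInt =
    if (i <= 0) acc else loop(i - 1, acc * i)
  loop(n, BigInt(1))
}
println("The result from facTail is : " + facTail(10))  // 3628800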
def combine(content:String, left: String = "[", right: String = "]") = left + content +right
//> combine: (content: String, left: String, right: String)String
println("The result from a combine is : " + combine("I love Spark", "<<"))
//> The result from a combine is : <<I love Spark]
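//Not in the original worksheet: default parameters can also be overridden by name,
//so you can replace just the right delimiter and keep the default "[" on the left.
println("Named argument example: " + combine("I love Spark", right = ">>"))
// Named argument example: [I love Spark>>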
def connected(args: Int*) = {
var result = 0
for(arg <- args) result += arg
result
}                                         //> connected: (args: Int*)Int
println("The result from a connected is : " + connected(1,2,3,4,5,6))
//> The result from a connected is : 21

def main(args: Array[String]) {
val width = args(0).toInt
for (arg <- args.drop(1))
processData(arg, width)

}                                         //> main: (args: Array[String])Unit

def processData(filename: String, width: Int) {
//nested function: tightly coupled to the enclosing function and able to read its
//parameters (filename, width) directly
def processLine(line: String) {
if (line.length > width)
println(filename +": "+ line)
}

val source = Source.fromFile(filename)
for (line <- source.getLines)
processLine(line)

}                                         //> processData: (filename: String, width: Int)Unit

}


Control Flow

//Euclid's algorithm: the while loop repeatedly replaces (a, b) with (b % a, a)
//until a is 0, at which point b holds the greatest common divisor of x and y
def looper(x : Long, y : Long) : Long = {
var a = x
var b = y
while(a != 0){
val temp = a
a = b % a
b = temp
}
b
}                                                //> looper: (x: Long, y: Long)Long
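//Usage sketch (not in the original): looper(12, 18) steps through 18 % 12 = 6 and
//12 % 6 = 0, then returns 6, the greatest common divisor.
println(looper(12, 18))                          // 6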
var line = ""                           //> line  : String = ""
do {
println("Please input some words below......")
line = scala.io.StdIn.readLine()        // Predef.readLine is deprecated; StdIn is the current home
println("Read: " + line)
} while (line != "")                    //> Please input some words below......
def doWhile(){
var line = ""
do {
line = scala.io.StdIn.readLine()
println("Read: " + line)
} while (line != "")
}


Files and Exceptions

import scala.io.Source
//        val file = Source.fromFile("***.txt")
val file = Source.fromURL("http://spark.apache.org/")
//> file  : scala.io.BufferedSource = non-empty iterator
for (line <- file.getLines){
println(line)
//> <!DOCTYPE html>
//| <html lang="en">
//| <head>
//|   <meta charset="utf-8">
//|   <meta http-equiv="X-UA-Compatible" content="IE=edge">
//|   <meta name="viewport" content="width=device-width, initial-scale=1.0">
//|
//|   <title>
//|      Apache Spark™ - Lightning-Fast Cluster Computing
//|
//|   </title>
//|
//|
//|
//|
//|     <meta name="description" content="Apache Spark is a fast and general en
//| gine for big data processing, with built-in modules for streaming, SQL, mac
//| hine learning and graph processing.">
//|
//|
//|   <!-- Bootstrap core CSS -->
//|   <link href="/css/cerulean.min.css" rel="stylesheet">
//|   <link href="/css/custom.css" rel="stylesheet">
//|
//|   <!-- Code highlighter CSS -->
//|   <link href="/css/pygments-default.css" rel="stylesheet">
//|
//|   <script type="text/javascript">
//|   <!-- Google Analytics initialization -->
//|   var _gaq = _gaq || [];
//|   _ga
//| Output exceeds cutoff limit.
def main(args: Array[String]){
var file = "scala.txt"
if (!args.isEmpty) file = args(0)
val file1 = if(!args.isEmpty) args(0) else "scala.xml"
//
println(file)

println(if(!args.isEmpty) args(0) else "Spark.xml")

for (i <- 1.to(10)) {
println("Number is :" + i)
}
//
val files = (new java.io.File(".")).listFiles()
for (file <- files){
println(file)
}

val n = 99
//    val file = "Spark.txt"
//    openFile(file)
try {
val half = if (n % 2 == 0) n / 2 else throw new RuntimeException("N must be even")
// Use the file
}catch {
case e : Exception => println("The exception is :" + e.getMessage())
}finally{
//      close(file)
}
}
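
The commented-out openFile/close calls above hint at the standard pattern for file handling: open the resource, use it inside try, and release it in finally so the handle is closed even when an exception is thrown. A minimal sketch (the printLongLines name and scala.txt file are illustrative, not from the original):

import scala.io.Source

def printLongLines(filename: String, width: Int): Unit = {
  val source = Source.fromFile(filename)      // may throw FileNotFoundException before try
  try {
    for (line <- source.getLines() if line.length > width)
      println(filename + ": " + line)
  } catch {
    case e: Exception => println("The exception is :" + e.getMessage())
  } finally {
    source.close()                            // always release the file handle
  }
}

printLongLines("scala.txt", 30)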