随手记比较乱,看书 看视频的时候顺带写的内容,后期会慢慢整理
List 有序可重复的结构,包含 不可变 可变两种
Set 是一个无序不可重复的数据结构
Nil是什么
Nil 是不可变的空 List(即 List()),常作为用 :: 构造列表时的结尾
scala> Nil
res46: scala.collection.immutable.Nil.type = List()
函数
head 头
tail 尾
不允许用 new List 直接创建 List 实例(List 是抽象类),可以用 :: 与 Nil 构造
scala> val a = 1::Nil
a: List[Int] = List(1)
变长List
scala> import scala.collection.mutable.ListBuffer
import scala.collection.mutable.ListBuffer

scala> val l5 = new ListBuffer[Int]
l5: scala.collection.mutable.ListBuffer[Int] = ListBuffer()

scala> l5 +=(4, 5,6)
res0: l5.type = ListBuffer(4, 5, 6)

scala> l5 -=(1,4)
res1: l5.type = ListBuffer(5, 6)
递归实现求和
scala> def sum(nums: Int*): Int = {
| if (nums.length == 0) 0
| else nums.head + sum(nums.tail: _*)
| }
sum: (nums: Int*)Int
scala> print(sum(1,2,3,4))
10
zip按照index对应关系组成tuple
zipWithIndex会把list的元素加上编号
scala> val names = List("Spark", "Hadoop", "Scala")
names: List[String] = List(Spark, Hadoop, Scala)

scala> val score = List(5, 9, 4)
score: List[Int] = List(5, 9, 4)

scala> val result = names.zip(score)
result: List[(String, Int)] = List((Spark,5), (Hadoop,9), (Scala,4))

scala> result.zipWithIndex
res4: List[((String, Int), Int)] = List(((Spark,5),0), ((Hadoop,9),1), ((Scala,4),2))
Set
无序不可重复结构
scala> val set = Set(1,1,1,2,2,3,3)
set: scala.collection.immutable.Set[Int] = Set(1, 2, 3)
Map
不可变map
scala> val map = Map("hadoop" -> 123, "spark" -> 345, "scala" -> 678)
map: scala.collection.immutable.Map[String,Int] = Map(hadoop -> 123, spark -> 345, scala -> 678)

scala> map("12")
java.util.NoSuchElementException: key not found: 12
at scala.collection.MapLike$class.default(MapLike.scala:228)
at scala.collection.AbstractMap.default(Map.scala:59)
at scala.collection.MapLike$class.apply(MapLike.scala:141)
at scala.collection.AbstractMap.apply(Map.scala:59)
... 32 elided

scala> map("hadoop") = 100
<console>:14: error: value update is not a member of scala.collection.immutable.Map[String,Int]
map("hadoop") = 100
可变map
scala> val map = scala.collection.mutable.Map("hadoop" -> 123, "spark" -> 345, "scala" -> 678)
map: scala.collection.mutable.Map[String,Int] = Map(spark -> 345, hadoop -> 123, scala -> 678)

scala> map("hadoop") = 100
scala> map("hadoop") //修改hadoop的值
res12: Int = 100

scala> map("flink") = 200

scala> map("flink") //新增flink的值
res14: Int = 200

scala> map("storm")
java.util.NoSuchElementException: key not found: storm
at scala.collection.MapLike$class.default(MapLike.scala:228)
at scala.collection.AbstractMap.default(Map.scala:59)
at scala.collection.mutable.HashMap.apply(HashMap.scala:65)
... 32 elided

scala> map.getOrElse("storm",300) //找不到就用默认值
res16: Int = 300

scala> map.getOrElse("flink",300) //找得到就用设置的值
res17: Int = 200
可变map新增元素
scala> map += ("storm" -> 300,"hbase"-> 400)
res19: map.type = Map(spark -> 345, hadoop -> 100, scala -> 678, flink -> 200, storm -> 300, hbase -> 400)
判断元素在map是否存在
scala> map.contains("storm")
res21: Boolean = true

scala> map.contains("kudu")
res22: Boolean = false
取出map中的值
scala> val map = scala.collection.mutable.Map("hadoop" -> 123, "spark" -> 345, "scala" -> 678)
map: scala.collection.mutable.Map[String,Int] = Map(spark -> 345, hadoop -> 123, scala -> 678)

scala> for ((k,v) <- map){
| println(k)
| println(v)
| }
spark
345
hadoop
123
scala
678

scala> for (k <- map.keySet){
| println(k)
| println(map(k))
| }
spark
345
hadoop
123
scala
678

scala> for (v <- map.values){
| println(v)
| }
345
123
678