// Scans a row-key range of the HBase table `apptmslogs:waybillInfo`,
// converts the cells into a DataFrame, and queries it through Spark SQL.
test("hbase") {
  import java.nio.charset.StandardCharsets

  val spark = SparkSession.builder().getOrCreate()
  val sqlContext = spark.sqlContext

  // Configure the HBase scan: table, columns, and row-key range.
  val hBaseConfiguration = HBaseConfiguration.create()
  hBaseConfiguration.set(TableInputFormat.INPUT_TABLE, "apptmslogs:waybillInfo")
  // Multiple columns are separated by spaces.
  hBaseConfiguration.set(TableInputFormat.SCAN_COLUMNS, "cf1:airwaybillno cf1:arrivecity")
  hBaseConfiguration.set(TableInputFormat.SCAN_ROW_START, "20180517132443-Y01591805170032") // first row key (inclusive)
  hBaseConfiguration.set(TableInputFormat.SCAN_ROW_STOP, "20180615170225-Y01591806150061")  // last row key (exclusive)

  // FIX: use the SparkContext of the session created above instead of an
  // unqualified `sc`, which is not defined anywhere in this scope.
  val hbaseRDD: RDD[(ImmutableBytesWritable, Result)] =
    spark.sparkContext.newAPIHadoopRDD(
      hBaseConfiguration,
      classOf[TableInputFormat],
      classOf[ImmutableBytesWritable],
      classOf[Result])

  val rowRDD = hbaseRDD.map { case (_, result) =>
    // Cells come back ordered by qualifier, so iteration yields
    // airwaybillno first, then arrivecity (matches SCAN_COLUMNS above).
    val cellIt = result.rawCells().iterator
    val airwaybillno = cellIt.next() // first column
    val arrivecity = cellIt.next()   // second column
    // FIX: decode as UTF-8 explicitly — the charset-less String constructor
    // uses the platform default charset and mangles the Chinese city names
    // on JVMs whose default encoding is not UTF-8.
    Row(
      new String(airwaybillno.getValueArray, airwaybillno.getValueOffset,
        airwaybillno.getValueLength, StandardCharsets.UTF_8),
      new String(arrivecity.getValueArray, arrivecity.getValueOffset,
        arrivecity.getValueLength, StandardCharsets.UTF_8))
  }

  // Schema for the two scanned columns.
  val structType = StructType(
    StructField("airwaybillno", StringType, true) ::
      StructField("arrivecity", StringType, true) :: Nil)

  val df = sqlContext.createDataFrame(rowRDD, structType)
  df.createOrReplaceTempView("waybillInfo") // register as a temp view
  val sqldf = sqlContext.sql("select * from waybillInfo")
  sqldf.show()
}
Query result (sample output of `sqldf.show()`):
+-----------------+----------+
| airwaybillno|arrivecity|
+-----------------+----------+
|"Y01591805170032"| "咸阳"|
|"Y01591806010063"| "安阳"|
|"Y00481806020005"| "兰州"|
|"Y00491806120014"| "厦门"|
|"Y00491806120033"| "厦门"|
|"Y00491806120052"| "厦门"|
|"Y00491806130141"| "厦门"|
|"Y00491806140033"| "厦门"|
|"Y00491806140052"| "厦门"|
+-----------------+----------+