Spring Boot整合Echarts绘制静态数据柱状图、饼图
项目目录
需要echarts.min.js和jquery.js
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.sid.spark</groupId>
<artifactId>webspark</artifactId>
<version>0.0.1-SNAPSHOT</version>
<packaging>jar</packaging>
<name>webspark</name>
<description>Demo project for Spring Boot</description>
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>2.0.3.RELEASE</version>
<relativePath/> <!-- lookup parent from repository -->
</parent>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<java.version>1.8</java.version>
</properties>
<dependencies>
<!-- Embedded web server + Spring MVC; serves the /echarts page and JSON endpoint -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-test</artifactId>
<scope>test</scope>
</dependency>
<!-- Thymeleaf resolves the "echarts" view name to templates/echarts.html -->
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-thymeleaf</artifactId>
</dependency>
<!-- Used by HBaseUtils to scan the course_click_spark_streaming table.
     NOTE(review): hbase-client pulls in its own servlet/logging deps that can
     clash with Spring Boot's — confirm the packaged jar starts cleanly. -->
<dependency>
<groupId>org.apache.hbase</groupId>
<artifactId>hbase-client</artifactId>
<version>1.2.0</version>
</dependency>
<!-- Only needed by the commented-out JSONArray code path in StatApp -->
<dependency>
<groupId>net.sf.json-lib</groupId>
<artifactId>json-lib</artifactId>
<version>2.4</version>
<classifier>jdk15</classifier>
</dependency>
</dependencies>
<build>
<plugins>
<!-- Repackages the jar as an executable Spring Boot fat jar -->
<plugin>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
</plugins>
</build>
</project>
CourseClickCountDAO.java
package com.sid.dao;
import com.sid.domain.CourseClickCount;
import com.sid.utils.HBaseUtils;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
 * Data-access layer for the hands-on courses' click counts.
 *
 * <p>Reads pre-aggregated counts from the HBase table
 * {@code course_click_spark_streaming}, whose rowkeys are prefixed by day.
 */
@Component
public class CourseClickCountDAO {

    /**
     * Fetches the click count of every course for the given day.
     *
     * @param day rowkey day prefix, e.g. "20180723"
     * @return one {@link CourseClickCount} per matching HBase row
     * @throws Exception if the HBase scan fails
     */
    public List<CourseClickCount> query(String day) throws Exception {
        // rowkey -> click count, scanned by day prefix from HBase.
        Map<String, Long> counts =
                HBaseUtils.getInstance().query("course_click_spark_streaming",day);

        List<CourseClickCount> result = new ArrayList<>();
        counts.forEach((rowKey, clicks) -> {
            CourseClickCount item = new CourseClickCount();
            item.setName(rowKey);
            item.setValue(clicks);
            result.add(item);
        });
        return result;
    }

    /** Ad-hoc smoke test against a live HBase cluster. */
    public static void main(String[] args) throws Exception{
        List<CourseClickCount> rows = new CourseClickCountDAO().query("20180723");
        System.out.println("executor for");
        for (CourseClickCount row : rows) {
            System.out.println("into for");
            System.out.println(row.getName() + " : " + row.getValue());
        }
        System.out.println("end for");
    }
}
CourseClickCount.java
package com.sid.domain;
import org.springframework.stereotype.Component;
/**
 * Plain value object carrying one course's click count to the front end.
 *
 * <p>Field names must stay {@code name}/{@code value}: echarts.html pushes
 * the JSON straight into the pie series, which expects those two keys.
 *
 * <p>NOTE: this class is always created via {@code new} in the DAO, never
 * injected, so the previous {@code @Component} annotation was removed — a
 * domain entity should not be registered as a Spring singleton bean.
 */
public class CourseClickCount {

    /** Course name shown in the chart legend/tooltip. */
    private String name;
    /** Accumulated click count for the course. */
    private long value;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public long getValue() {
        return value;
    }

    public void setValue(long value) {
        this.value = value;
    }
}
StatApp.java
package com.sid.spark;
import com.sid.dao.CourseClickCountDAO;
import com.sid.domain.CourseClickCount;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.ModelAndView;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Web layer: serves the ECharts page and the per-course click-count JSON.
 */
@RestController
public class StatApp {

    /** Course id (the rowkey suffix after the day) -> human-readable name. */
    private static Map<String, String> courses = new HashMap<>();
    static {
        courses.put("128","Spark SQL实战");
        courses.put("130","Hadoop基础");
        courses.put("131","Storm实战");
        courses.put("146","Spark Streaming实战");
        courses.put("145","MapReduce实战");
        courses.put("112","理论");
    }

    @Autowired
    CourseClickCountDAO courseClickCountDAO;

    /**
     * Returns the click count of every course for the requested day as JSON.
     *
     * @param day day to query, format yyyyMMdd; defaults to 20180723 so the
     *            existing parameterless POST from echarts.html keeps working
     * @return list of {name, value} pairs consumed by the pie chart
     * @throws Exception if the HBase query fails
     */
    @RequestMapping(value = "/course_clickcount_dynamic", method = RequestMethod.POST)
    @ResponseBody
    public List<CourseClickCount> courseClickCount(
            @RequestParam(value = "day", defaultValue = "20180723") String day) throws Exception {
        List<CourseClickCount> list = courseClickCountDAO.query(day);
        for(CourseClickCount model : list) {
            // Rowkey is "<yyyyMMdd>_<courseId>", so substring(9) skips the
            // 8-digit day plus the underscore and leaves the course id.
            // NOTE(review): an id missing from `courses` sets the name to
            // null — confirm that is acceptable for the front end.
            model.setName(courses.get(model.getName().substring(9)));
        }
        return list;
    }

    /** Renders templates/echarts.html (resolved by Thymeleaf). */
    @RequestMapping(value = "/echarts", method = RequestMethod.GET)
    public ModelAndView echarts(){
        return new ModelAndView("echarts");
    }
}
HBaseUtils.java
package com.sid.utils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * HBase access helper (lazy singleton).
 *
 * <p>Connection settings are hard-coded to the node1..node3 ZooKeeper quorum.
 */
public class HBaseUtils {
    HBaseAdmin admin = null;
    Configuration conf = null;

    /**
     * Private constructor: sets up the ZooKeeper quorum / root dir and opens
     * an admin handle.
     */
    private HBaseUtils() {
        conf = new Configuration();
        conf.set("hbase.zookeeper.quorum", "node1:2181,node2:2181,node3:2181");
        conf.set("hbase.rootdir", "hdfs://hadoopcluster/hbase");
        try {
            admin = new HBaseAdmin(conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private static HBaseUtils instance = null;

    /** Returns the shared instance, creating it on first use (thread-safe). */
    public static synchronized HBaseUtils getInstance() {
        if (null == instance) {
            instance = new HBaseUtils();
        }
        return instance;
    }

    /**
     * Opens an HTable handle for the given table name.
     *
     * @return the table, or null if opening failed (error is only printed)
     */
    public HTable getTable(String tableName) {
        HTable table = null;
        try {
            table = new HTable(conf, tableName);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return table;
    }

    /**
     * Scans rows whose rowkey starts with {@code condition} and returns
     * rowkey -> click count read from info:click_count.
     *
     * <p>Fix: the original leaked the ResultScanner and HTable; both are now
     * closed even when the scan throws.
     *
     * @param tableName HBase table to scan
     * @param condition rowkey prefix, e.g. "20180723"
     * @throws Exception if the scan fails
     */
    public Map<String, Long> query(String tableName, String condition) throws Exception {
        Map<String, Long> map = new HashMap<>();
        HTable table = getTable(tableName);
        String cf = "info";
        String qualifier = "click_count";
        Scan scan = new Scan();
        // Prefix filter: match all rowkeys for the requested day.
        scan.setFilter(new PrefixFilter(Bytes.toBytes(condition)));
        try (ResultScanner rs = table.getScanner(scan)) {
            for (Result result : rs) {
                String row = Bytes.toString(result.getRow());
                long clickCount = Bytes.toLong(result.getValue(cf.getBytes(), qualifier.getBytes()));
                map.put(row, clickCount);
            }
        } finally {
            table.close();
        }
        return map;
    }

    /** Ad-hoc smoke test against a live cluster. */
    public static void main(String[] args) throws Exception {
        Map<String, Long> map = HBaseUtils.getInstance().query("course_click_spark_streaming" , "20180723_131");
        for(Map.Entry<String, Long> entry: map.entrySet()) {
            System.out.println(entry.getKey() + " : " + entry.getValue());
        }
    }
}
echarts.html
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>Demo</title>
<!-- ECharts library -->
<script src="js/echarts.min.js"></script>
<!-- jQuery, used for the AJAX call that loads the chart data -->
<script src="js/jquery.js"></script>
</head>
<body>
<!-- ECharts requires a container with explicit width/height; this one is screen-centered. -->
<div id="main" style="width: 600px;height:400px;position:absolute;top:50%;left: 50%;margin-top: -200px;margin-left: -300px;"></div>
<script type="text/javascript">
// Initialise the chart on the #main container.
var myChart = echarts.init(document.getElementById('main'));

// Static options; the pie data is filled in once the AJAX request returns.
var option = {
    title : {
        text: 'Spark Streaming实战课程访问量实时统计',
        subtext: '实战课程访问次数',
        x:'center'
    },
    tooltip : {
        trigger: 'item',
        formatter: "{a} <br/>{b} : {c} ({d}%)"
    },
    legend: {
        orient: 'vertical',
        left: 'left',
        // Keep in sync with the `courses` map in StatApp.java.
        // Fix: 'MapReduce实战' (course id 145) was missing, so its pie
        // slice had no legend entry.
        data: ['Spark SQL实战','Hadoop基础','Storm实战','Spark Streaming实战','MapReduce实战','理论']
    },
    series : [
        {
            name: '访问次数',
            type: 'pie',
            radius : '55%',
            center: ['50%', '60%'],
            data: [],   // populated asynchronously below
            itemStyle: {
                emphasis: {
                    shadowBlur: 10,
                    shadowOffsetX: 0,
                    shadowColor: 'rgba(0, 0, 0, 0.5)'
                }
            }
        }
    ]
};
// Render immediately with an empty series, then update once data arrives.
myChart.setOption(option);

// Fix: the original used async:false, which is deprecated and freezes the
// page while waiting for the server. Load asynchronously and merge the data
// into the existing option via a second setOption call.
$.ajax({
    type: "POST",
    url: "/sid/course_clickcount_dynamic",
    dataType: 'json',
    success: function(result){
        var datas = [];
        for (var i = 0; i < result.length; i++) {
            datas.push({"value": result[i].value, "name": result[i].name});
        }
        myChart.setOption({ series: [{ data: datas }] });
    }
});
</script>
</body>
</html>
运行
访问http://localhost:9999/sid/echarts
本地能跑通后打包提交到服务器上运行
cd /app
java -jar webspark-0.0.1-SNAPSHOT.jar
访问http://node1:9999/sid/echarts