Using canal to listen to the MySQL binlog for incremental sync to Elasticsearch (pre-6.3)

Summary: canal listens to the MySQL binlog and incrementally syncs the changes into Elasticsearch (versions below 6.3).

Background: I have recently been learning how to sync MySQL to Elasticsearch with canal, but ran into a lot of version-related problems. With Elasticsearch versions below 6.3, canal.adapter reports errors, because the parent/child document structure is not the same on the two sides of that version boundary (older releases use the _parent meta-field across multiple types, newer ones use a join field inside a single type). So instead of the ready-made adapter, we write our own client.

Processing flow:

(flow diagram omitted)

Environment

OS: Linux CentOS 7

MySQL 5.7

canal 1.1.5 (canal.deployer)

Elasticsearch 5.6.14 (below 6.3)

JDK 1.8

1. For modifying the MySQL configuration and starting canal.deployer, see my previous article (link).
2. With the message producer (canal.deployer) configured in step 1, we now write the message consumer in Java.
2.1. Write the configuration files
# Note: java.util.Properties treats an inline "#" as part of the value, so keep comments on their own lines.

# File: conf-canal.properties
# canal server ip
canal.ip = 127.0.0.1
# canal server port
canal.port = 11111
# the instance to connect to
canal.instance = example
# the database to listen to
canal.dbName = yunzq_metro

# File: conf-ElasticSearch.properties
# ES cluster ip
ElasticSearch.ip = 127.0.0.1
# ES TCP port, usually 9300
ElasticSearch.port = 9300
# ES cluster name
ElasticSearch.name = gzdt-dev
# environment flag used by our project; not needed in general
ElasticSearch.env = test

# File: conf-jdbc.properties   database connection settings
jdbc.driverClassName=com.mysql.jdbc.Driver
jdbc.url=jdbc:mysql://ip:3306/db_name?useUnicode=true&characterEncoding=UTF-8&allowMultiQueries=true&autoReconnect=true&failOverReadOnly=false
jdbc.username=root
jdbc.password=root
2.2. Write the database connection pool ConnectionPoolUtil.java

The linked write-up covers this quite thoroughly; I only turned it into a singleton (link). A minimal stand-in sketch follows below.
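
For completeness, here is a minimal, hypothetical sketch of what ConnectionPoolUtil needs to expose for the code in this article: a getConnectionPoolUtil() singleton accessor and a getConnection() method. It simply reads conf-jdbc.properties and opens connections via DriverManager; the class in the linked article wraps a real pool, so treat this only as a placeholder.

package com.concom.yunzq.search.util;

import java.io.IOException;
import java.io.InputStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.util.Properties;

/**
 * Minimal stand-in for the connection pool described in the linked article.
 * Singleton exposing the two methods the rest of this article relies on.
 */
public class ConnectionPoolUtil {

	private static volatile ConnectionPoolUtil instance;
	private final Properties jdbcProps = new Properties();

	private ConnectionPoolUtil() {
		// load conf-jdbc.properties from the classpath (same /config path as the other config files)
		try (InputStream is = ConnectionPoolUtil.class.getResourceAsStream("/config/conf-jdbc.properties")) {
			jdbcProps.load(is);
			Class.forName(jdbcProps.getProperty("jdbc.driverClassName"));
		} catch (IOException | ClassNotFoundException e) {
			throw new IllegalStateException("failed to initialise jdbc config", e);
		}
	}

	/** Double-checked-locking singleton accessor. */
	public static ConnectionPoolUtil getConnectionPoolUtil() {
		if (instance == null) {
			synchronized (ConnectionPoolUtil.class) {
				if (instance == null) {
					instance = new ConnectionPoolUtil();
				}
			}
		}
		return instance;
	}

	/** No real pooling here: simply opens a new connection per call. */
	public Connection getConnection() throws SQLException {
		return DriverManager.getConnection(
				jdbcProps.getProperty("jdbc.url"),
				jdbcProps.getProperty("jdbc.username"),
				jdbcProps.getProperty("jdbc.password"));
	}
}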

2.3. Define the mapping class EsEntity.java
package com.concom.yunzq.search.util;

import java.util.Map;

/**
 * Entity describing how a DB table maps to an ES document
 * @author tzf
 *
 */
public class EsEntity {

	/** ES index */
	private String _index;
	/** ES type */
	private String _type;
	/** DB column holding the document id */
	private String _id_field;
	/** DB column holding the document's parent id */
	private String _parent_id_field;
	/** <ES document field, DB column name> */
	private Map<String,String> fildMap;
	/** sqlMap {sql:"", params_field:"", isMain:bit}: the SQL to run, the column supplying its parameter, and whether this is the main table */
	private Map<String,Object> sqlMap;

	public String get_index() {
		return _index;
	}

	public void set_index(String _index) {
		this._index = _index;
	}

	public String get_type() {
		return _type;
	}

	public void set_type(String _type) {
		this._type = _type;
	}

	public String get_id_field() {
		return _id_field;
	}

	public void set_id_field(String _id_field) {
		this._id_field = _id_field;
	}

	public String get_parent_id_field() {
		return _parent_id_field;
	}

	public void set_parent_id_field(String _parent_id_field) {
		this._parent_id_field = _parent_id_field;
	}

	public Map<String, String> getFildMap() {
		return fildMap;
	}

	public void setFildMap(Map<String, String> fildMap) {
		this.fildMap = fildMap;
	}
	
	
	public Map<String, Object> getSqlMap() {
		return sqlMap;
	}

	public void setSqlMap(Map<String, Object> sqlMap) {
		this.sqlMap = sqlMap;
	}

	/**
	 * 
	 * @param _index ES index
	 * @param _type ES type
	 * @param _id_field DB column holding the document id
	 * @param fildMap <ES document field, DB column name>
	 */
	public EsEntity(String _index, String _type,String _id_field, Map<String, String> fildMap) {
		this._index = _index;
		this._type = _type;
		this._id_field = _id_field;
		this.fildMap = fildMap;
	}
	
	/**
	 * 
	 * @param _index ES index
	 * @param _type ES type
	 * @param _id_field DB column holding the document id
	 * @param fildMap <ES document field, DB column name>
	 * @param sqlMap {sql:"", params_field:"", isMain:bit}: the SQL to run, the column supplying its parameter, and whether this is the main table
	 */
	public EsEntity(String _index, String _type,String _id_field, Map<String, String> fildMap,Map<String, Object> sqlMap) {
		this._index = _index;
		this._type = _type;
		this._id_field = _id_field;
		this.fildMap = fildMap;
		this.sqlMap = sqlMap;
	}
	
	/**
	 * 
	 * @param _index ES index
	 * @param _type ES type
	 * @param _id_field DB column holding the document id
	 * @param _parent_id_field DB column holding the document's parent id
	 * @param fildMap <ES document field, DB column name>
	 */
	public EsEntity(String _index, String _type,String _id_field,String _parent_id_field, Map<String, String> fildMap) {
		this._index = _index;
		this._type = _type;
		this._id_field = _id_field;
		this._parent_id_field = _parent_id_field;
		this.fildMap = fildMap;
	}
	
	/**
	 * 
	 * @param _index ES index
	 * @param _type ES type
	 * @param _id_field DB column holding the document id
	 * @param _parent_id_field DB column holding the document's parent id
	 * @param fildMap <ES document field, DB column name>
	 * @param sqlMap {sql:"", params_field:"", isMain:bit}: the SQL to run, the column supplying its parameter, and whether this is the main table
	 */
	public EsEntity(String _index, String _type,String _id_field,String _parent_id_field, Map<String, String> fildMap,Map<String, Object> sqlMap) {
		this._index = _index;
		this._type = _type;
		this._id_field = _id_field;
		this._parent_id_field = _parent_id_field;
		this.fildMap = fildMap;
		this.sqlMap = sqlMap;
	}
		

}
2.4. Define the DB-to-ES mapping in elastic_mapping.java
package com.concom.yunzq.search.util;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import com.concom.yunzq.constant.ElasticConstant;


/**
 * The DB-to-ES mapping.
 * This is the key part of the article: define a mapping structure that fits your own project.
 * @author tzf
 */
@SuppressWarnings("serial")
public class elastic_mapping {
    private static String env = "";
    static{
    	// read env from the config file
    	InputStream is = SimpleCanalClient.class.getResourceAsStream("/config/conf-ElasticSearch.properties");
    	try {
    		Properties prop = new Properties();
			prop.load(is);
			env = prop.getProperty("ElasticSearch.env");
		} catch (IOException e) {
			e.printStackTrace();
		}finally {
			if (is != null) {
				try {
					is.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
    }
	
	/** <table name, list of ES document entities> */
	public static Map<String,List<EsEntity>> esMap = new HashMap<>();
	static {
		//[start] test
		esMap.put("teacher", //表名
				new ArrayList<EsEntity>(){{
						add(new EsEntity(
								"parent_search", //索引名
								"teacher_info", //类型名
								"id", //db中的id字段名
								//"parent_id", //db中的parent_id字段名
								new HashMap<String,String>(){{
									put("id","id"); //key:与es文档fild相同   value:与db列名相同
									put("name","name");
									put("age","age");
								}}
							)
						);*/
						add(new EsEntity(
								"parent_search", // index name
								"relation", // type name
								"id", // id column in the DB
								new HashMap<String,String>(){{
									put("teacherName","teacherName"); // key: same as the ES document field; value: same as the DB column name
									put("studentId","studentId");
									put("studentName","studentName");
									put("teacherId","teacherId");
									put("createOn","createOn");
									put("sumStu","sumStu");
								}},
								// when this parameter is present, the mapping is taken from the SQL result set instead of the raw table columns
								// remember to convert date formats, e.g. DATE_FORMAT(v.suitOn,'%Y-%m-%d %h:%m:%s')
								new HashMap<String,Object>(){{
									put("sql","SELECT t.name teacherName,t.id teacherId,s.name studentName,s.id studentId,0 sumStu,createOn FROM student s LEFT JOIN teacher t ON s.teacher_id = t.id WHERE t.id = ?");
									put("params_field","id"); // column of the changed teacher row that supplies the SQL parameter (WHERE t.id = ?)
									put("isMain",false);
								}}
							)
						);
				}}
		);
		esMap.put("student", //表名
				new ArrayList<EsEntity>(){{
						/*add(new EsEntity(
								"parent_search", //索引名
								"student_info", //类型名
								"id", //db中的id字段名
								//"teacher_id", //db中的parent_id字段名
								new HashMap<String,String>(){{
									put("id","id"); //key:与es文档fild相同   value:与db列名相同
									put("name","name");
									put("age","age");
									put("teacher_id","teacher_id");
								}}
							)
						);*/
						/*add(new EsEntity(
								"parent_search", //索引名
								"relation", //类型名
								"id", //db中的id字段名
								new HashMap<String,String>(){{
									put("teacherName","teacherName"); //key:与es文档fild相同   value:与db列名相同
									put("studentId","studentId");
									put("studentName","studentName");
									put("teacherId","teacherId");
									put("createOn","createOn");
									put("sumStu","sumStu");
								}},
								new HashMap<String,Object>(){{
									put("sql","SELECT t.name teacherName,t.id teacherId,s.name studentName,s.id studentId, 0 sumStu,createOn FROM student s LEFT JOIN teacher t ON s.teacher_id = t.id WHERE s.id = ?");
								}}
							)
						);*/
				}}
		);
		//[end] test
	}
}
2.5. Write the message consumer SimpleCanalClient.java
package com.concom.yunzq.search.util;

import java.io.IOException;
import java.io.InputStream;
import java.net.InetSocketAddress;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;

import org.apache.log4j.Logger;

import com.alibaba.otter.canal.client.CanalConnector;
import com.alibaba.otter.canal.client.CanalConnectors;
import com.alibaba.otter.canal.protocol.CanalEntry;
import com.alibaba.otter.canal.protocol.CanalEntry.Column;
import com.alibaba.otter.canal.protocol.CanalEntry.Entry;
import com.alibaba.otter.canal.protocol.CanalEntry.EntryType;
import com.alibaba.otter.canal.protocol.CanalEntry.RowChange;
import com.alibaba.otter.canal.protocol.CanalEntry.RowData;
import com.alibaba.otter.canal.protocol.Message;
import com.alibaba.otter.canal.protocol.exception.CanalClientException;
import com.google.protobuf.InvalidProtocolBufferException;
/**
 * Canal client.
 * Receives the MySQL change events pushed by the canal server.
 * @author tzf
 *
 */
public class SimpleCanalClient {
	
	//private static ConcurrentLinkedQueue<String> SQL_QUEUE = new ConcurrentLinkedQueue<String>();
	
	private static Logger logger = Logger.getLogger(SimpleCanalClient.class);
	private static CanalConnector connector;// = CanalConnectors.newSingleConnector(new InetSocketAddress("127.0.0.1",11111), "example", "", "");
	private static volatile boolean isStart = false;
	private static Properties prop = new Properties();
	
	
	public static void main(String[] args) {
		/*logger.debug("debug test");
		logger.info("info test");
		logger.warn("warn test");*/
		connCanal();
	}
	
	public static void startCanal(){
		if(isStart) {
			logger.warn("canal is already running; do not start it again");
		}else{
			isStart = true;
			connCanal();
		}
		logger.info("url-request canal启动成功");
	}
	
	public static void stopCanal(){
		try {
			isStart = false;
			connector.disconnect();
		} catch ( CanalClientException e) {
			e.printStackTrace();
		}
		logger.info( "url-request canal stopped successfully");
	}
	
	public static void connCanal(){
		InputStream is = SimpleCanalClient.class.getResourceAsStream("/config/conf-canal.properties");
		String ip = "";
		Integer port = null;
		String instance = "";
		String dbName = "";
		try {
			prop.load(is);
			ip = prop.getProperty("canal.ip");
			port = Integer.parseInt(prop.getProperty("canal.port"));
			instance = prop.getProperty("canal.instance");
			dbName = prop.getProperty("canal.dbName");
			connector = CanalConnectors.newSingleConnector(new InetSocketAddress(ip,port), instance, "", "");
		} catch (IOException e) {
			e.printStackTrace();
		}finally {
			if (is != null) {
				try {
					is.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		
		int batchSize = 1000;
		try {
			connector.connect();
			connector.subscribe(dbName + "\\..*");// subscription filter: dbName\\..*
			connector.rollback();
			while (true) {
				try {
					// pull up to batchSize records from the server without acking; takes whatever is available
					Message message = connector.getWithoutAck(batchSize);
					long batchId = message.getId();
					int size = message.getEntries().size();
					if (batchId == -1 || size == 0) {
						Thread.sleep(1000);
						connector.ack(batchId);
					} else {
						dataHandle(message.getEntries());
						connector.ack(batchId); // confirm the batch once it has been processed
					}
				} catch (InterruptedException e) {
					e.printStackTrace();
				} catch (InvalidProtocolBufferException e) {
					e.printStackTrace();
				} 
			}
			
		} catch(Exception e){
			isStart = false;
			e.printStackTrace();
		}finally {
			connector.disconnect();
		}
	}
	
	/**
     * Handle the parsed change data
     *
     * @param entrys
     */
    private static void dataHandle(List<Entry> entrys) throws InvalidProtocolBufferException {
    	String opType = "";// operation type: DELETE, INSERT or UPDATE
        for (Entry entry : entrys) {
            if (EntryType.ROWDATA == entry.getEntryType()) {
            	opType = CanalEntry.RowChange.parseFrom(entry.getStoreValue()).getEventType().toString();
                //RowChange rowChange = RowChange.parseFrom(entry.getStoreValue());
                Map<String, Object> dbMap = canalAdapter(entry,opType);
                ElasticCRUD.elasticSearchAdapter(dbMap , opType);
            }
        }
        
    }
    
    /**
     * Adapt a canal entry into a plain map
     * @param entry
     * entry.getHeader().getSchemaName()  gets the database name
     * entry.getHeader().getTableName()   gets the table name
     * rowData.getBeforeColumnsList()     gets the columns before the change
     * rowData.getAfterColumnsList()      gets the columns after the change
     * newColumnList.get(i).getName()     gets the column name
     * newColumnList.get(i).getValue()    gets the column value
     * 
     * @return {
     * 		"tableName":"table name",
     * 		"fieldMap_old"	{"column name":"value"},
     * 		"fieldMap_new"	{"column name":"value"}
     * }
     */
    private static Map<String,Object> canalAdapter(Entry entry,String opType) {
    	
    	Map<String,Object> dbMap = new HashMap<String, Object>();
    	Map<String,String> fieldMap_old = new HashMap<String,String>();
    	Map<String,String> fieldMap_new = new HashMap<String,String>();
    	List<Column> oldColumnList = null;
    	List<Column> newColumnList = null;
        try {
            RowChange rowChange = RowChange.parseFrom(entry.getStoreValue());
            List<RowData> rowDatasList = rowChange.getRowDatasList();
            for (RowData rowData : rowDatasList) {
            	oldColumnList = rowData.getBeforeColumnsList();
            	newColumnList = rowData.getAfterColumnsList();
            	
        		for (Column column : oldColumnList) {
        			fieldMap_old.put(column.getName(), column.getValue());
        		}
				// columns after the change
				for (Column column : newColumnList) {
					fieldMap_new.put(column.getName(), column.getValue());
				}
                
            }
            dbMap.put("tableName", entry.getHeader().getTableName());
            dbMap.put("fieldMap_old", fieldMap_old);
            dbMap.put("fieldMap_new", fieldMap_new);
        } catch (InvalidProtocolBufferException e) {
            e.printStackTrace();
        }
        return dbMap;
    }

}
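
One note on the consume loop in connCanal(): the standard canal client examples pair getWithoutAck with ack on success and rollback on failure, so that a batch which fails during processing is redelivered instead of being silently confirmed. A hedged sketch of that pattern, using the same CanalConnector methods as above, could replace the body of the while loop like this:

				// ack on success, rollback on failure (sketch; drop-in for the while(true) body above)
				try {
					Message message = connector.getWithoutAck(batchSize);
					long batchId = message.getId();
					if (batchId == -1 || message.getEntries().isEmpty()) {
						Thread.sleep(1000);              // nothing to fetch, back off briefly
					} else {
						try {
							dataHandle(message.getEntries());
							connector.ack(batchId);      // confirm: the batch was fully processed
						} catch (Exception e) {
							connector.rollback(batchId); // hand the batch back so it is redelivered later
						}
					}
				} catch (InterruptedException e) {
					Thread.currentThread().interrupt();
					break;
				}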

2.6. Write the ES handler ElasticCRUD.java
package com.concom.yunzq.search.util;


import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutionException;

import org.apache.log4j.Logger;
import org.elasticsearch.action.delete.DeleteResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.transport.client.PreBuiltTransportClient;
import org.springframework.transaction.annotation.Transactional;

import com.alibaba.fastjson.JSON;
import com.concom.yunzq.util.StringUtil;
import com.google.gson.JsonObject;

/**
 * ES insert/update/delete handler
 * @author tzf
 *
 */
public class ElasticCRUD {
	
	private static Logger logger = Logger.getLogger(ElasticCRUD.class);
	private static TransportClient client;
	
	static{
		logger.info("initializing the ES client...");
		InputStream is = SimpleCanalClient.class.getResourceAsStream("/config/conf-ElasticSearch.properties");
		Properties prop = new Properties();
		String ip = "";
		Integer port = null;
		String name = "";
		try {
			prop.load(is);
			ip = prop.getProperty("ElasticSearch.ip");
			port = Integer.parseInt(prop.getProperty("ElasticSearch.port"));
			name = prop.getProperty("ElasticSearch.name");
		} catch (IOException e) {
			e.printStackTrace();
		}finally {
			if (is != null) {
				try {
					is.close();
				} catch (IOException e) {
					e.printStackTrace();
				}
			}
		}
		Settings settings = Settings.builder()  
	            .put("cluster.name", name)//指定集群名称    gzdt-dev   elasticsearch
	            .put("client.transport.sniff", true)//启动嗅探功能  
	            .build();
		
		try {
			//client = new PreBuiltTransportClient(settings);//TransportClient(settings);//("127.0.0.1", 9200)//InetSocketTransportAddress
            client = new PreBuiltTransportClient(settings).addTransportAddresses(new TransportAddress(InetAddress.getByName(ip), port));//
			List<DiscoveryNode> nodes = client.connectedNodes();
		} catch (UnknownHostException e) {
			e.printStackTrace();
		} 
	}
	
	public static void main(String[] args) {
		executeSql("SELECT * from tEstimateConstant where id = ? ", "9");
		
		//[start] updateTest
		/*JsonObject jsonObject = new JsonObject();
		jsonObject.addProperty("name", "小黑猫老师2");
		jsonObject.addProperty("id", "1");
		UpdateResponse updateResponse = client.prepareUpdate("parent_search", "teacher_info", "1").setDoc(jsonObject.toString(),XContentType.JSON).get();
		System.out.println(updateResponse);*/
		//[end] updateTest
		
		//[start] searchById
		SearchResponse searchResponse = client.prepareSearch("parent_search")
				.setTypes("student_info")
				.setQuery(QueryBuilders.idsQuery().addIds("1"))
				.get();
		SearchHits hits = searchResponse.getHits();
		if(hits.getHits() != null && hits.getHits().length > 0){
			SearchHit searchHit = hits.getHits()[0];

			for (Iterator<DocumentField> iter = searchHit.iterator(); iter.hasNext();) {
				DocumentField next = iter.next();
				//System.out.println("parent id found: " + next.getName() + " -- " + next.getValue());
				if(next.getName().equals("_parent")){
					System.out.println( next.getValue().toString());
				}
			}
			/*
			  for (SearchHit hit : searchHits){
			  		// read the source
			  		String sourceAsString = hit.getSourceAsString(); // as a JSON string
	                Map<String, Object> sourceAsMap = hit.getSourceAsMap(); // as a Map
	                
	                // read the highlight result
	                Map<String, HighlightField> highlightFields = hit.getHighlightFields();
	                HighlightField highlight = highlightFields.get("title");
	                Text[] fragments = highlight.fragments();
	                String fragmentString = fragments[0].string();
			  }
			*/
		}
		client.close();
		//[end] searchById
	}
	
	/**
	 * ES adapter: maps a DB change to the corresponding ES operation
	 * @param dbMap {"fieldMap_old": data before the change, "fieldMap_new": data after the change}
	 * @param type INSERT/UPDATE/DELETE
	 * @author tzf
	 */
	@Transactional
	public static void elasticSearchAdapter(Map<String, Object> dbMap,String type){
			
		// table name
		String tableName = (String) dbMap.get("tableName");
		// look up the DB-to-ES mapping
		List<EsEntity> esList = elastic_mapping.esMap.get(tableName);
		
		// only handle tables that have a mapping
		if (esList != null && esList.size() > 0) {
			Map<String,String> dbFieldMap_old = (Map<String,String>) dbMap.get("fieldMap_old");
			Map<String,String> dbFieldMap_new = (Map<String,String>) dbMap.get("fieldMap_new");
			
			logger.info("---- begin ---- operation type: " + type);
			logger.info("data before: " + JSON.toJSONString(dbFieldMap_old));
			logger.info("data after: " + JSON.toJSONString(dbFieldMap_new));
			
			switch (type) {
			case "INSERT":
				esOpInsertData(dbFieldMap_old,dbFieldMap_new,esList);
				break;
			case "UPDATE":
				esOpUpdateData(dbFieldMap_old,dbFieldMap_new,esList);
				break;
			case "DELETE":
				esDelete(dbFieldMap_old,dbFieldMap_new,esList);
				break;
			}
			
			logger.info("---- end ----");
		}
	}
	
	/**
     * Build the JSON for an ES update
     * @param dbFieldMap_old  data before the DB change
     * @param dbFieldMap_new  data after the DB change
     * @param list  ES document mappings for this table
     * @author tzf
     */
	private static void esOpUpdateData(Map<String,String> dbFieldMap_old,Map<String,String> dbFieldMap_new,List<EsEntity> list){
			String _index = "";// ES index
			String _type = "";// ES type
			String _id = "";// ES document id
			String _parent_id = "";// ES parent document id
			String params_field = null;// name of the SQL parameter column
			String foreignValue = null;// value of the SQL parameter
			List<Map<String, String>> executeSqlList = null;
			Map<String, String> paramMap = null;
			
			try {
				// iterate over every document mapping for this table
				for (EsEntity esEntity : list) {
					_index = esEntity.get_index();
					_type = esEntity.get_type();
					
					// when a SQL mapping exists, map from the SQL result set instead of the raw table columns
					if (esEntity.getSqlMap() != null) {
						params_field = esEntity.getSqlMap().get("params_field").toString();
						foreignValue = StringUtil.isNotEmpty(dbFieldMap_old.get(params_field)) ? dbFieldMap_old.get(params_field) : dbFieldMap_new.get(params_field);
						executeSqlList = executeSql(esEntity.getSqlMap().get("sql").toString(), foreignValue);
						for (Map<String, String> executeSqlMap : executeSqlList) {
							//_id
							_id = executeSqlMap.get(esEntity.get_id_field());
							//_parent_id
							if(esEntity.get_parent_id_field() != null && esEntity.get_parent_id_field().length() > 0){
								_parent_id = findParentIdById(_index, _type, _id);
							}
							paramMap = executeSqlMap != null ? executeSqlMap : dbFieldMap_new;
							esUpdate(_index, _type, _id, _parent_id, paramMap, esEntity);
						}
					}else{
						//_id
						_id = StringUtil.isNotEmpty(dbFieldMap_old.get(esEntity.get_id_field())) ? dbFieldMap_old.get(esEntity.get_id_field()) : dbFieldMap_new.get(esEntity.get_id_field());
						//_parent_id
						if(esEntity.get_parent_id_field() != null && esEntity.get_parent_id_field().length() > 0){
							_parent_id = findParentIdById(_index, _type, _id);
						}
						esUpdate(_index, _type, _id, _parent_id, dbFieldMap_new, esEntity);
					}
				}
			} catch (Exception e) {
				e.printStackTrace();
			}
    }

	/**
	 * ES update
	 * @param _index ES index
	 * @param _type ES type
	 * @param _id ES document id
	 * @param _parent_id ES parent id
	 * @param paramMap fields to update
	 * @param esEntity ES-DB mapping
	 * @author tzf
	 */
	private static void esUpdate(String _index, String _type, String _id, String _parent_id,
			Map<String, String> paramMap, EsEntity esEntity) {
		UpdateResponse response;
		JsonObject jsonObject = new JsonObject();
		
		for (Map.Entry<String, String> mapItem : esEntity.getFildMap().entrySet()) {
			jsonObject.addProperty(mapItem.getKey(), paramMap.get(mapItem.getValue()) == null || paramMap.get(mapItem.getValue()).length() == 0 ? null : paramMap.get(mapItem.getValue()));
		}
		
		logger.info("es_update操作:_index:" + _index + "; _type:" + _type + "; _id:" + _id + "; __parent_id:" + _parent_id);
		logger.info(jsonObject.toString());
		if(_parent_id.length() > 0){
			response = client.prepareUpdate(_index, _type, _id).setParent(_parent_id).setDoc(jsonObject.toString(),XContentType.JSON).get();
		}else{
			response = client.prepareUpdate(_index, _type, _id).setDoc(jsonObject.toString(),XContentType.JSON).get();
		}
		logger.info("es_update执行结果:" + response.toString());
	}
	
	
	
	/**
	 * ES delete
	 * @param dbFieldMap_old  data before the DB change
     * @param dbFieldMap_new  data after the DB change
     * @param list  ES document mappings for this table
	 * @author tzf
	 */
	private static void esDelete(Map<String,String> dbFieldMap_old,Map<String,String> dbFieldMap_new,List<EsEntity> list){
 		String _index = "";
		String _type = "";
		String _id = "";
		String _parent_id = "";
		DeleteResponse response = null;
		JsonObject jsonObject = null;
		UpdateResponse updateResponse = null;
		List<Map<String, String>> executeSqlList = null;
		
		try {
			// iterate over every document mapping for this table
			for (EsEntity esEntity : list) {
				_index = esEntity.get_index();
				_type = esEntity.get_type();
				
				// when a SQL mapping exists (and this is not the main table), update the ES documents from the SQL result set
				if (esEntity.getSqlMap() != null && !(boolean)esEntity.getSqlMap().get("isMain")) {
					executeSqlList = executeSql(esEntity.getSqlMap().get("sql").toString(), dbFieldMap_old.get(esEntity.getSqlMap().get("params_field").toString()));
					for (Map<String, String> executeSqlMap : executeSqlList) {
						_id = executeSqlMap.get(esEntity.get_id_field());
						// parent-document case
						if(esEntity.get_parent_id_field() != null && esEntity.get_parent_id_field().length() > 0){
							_parent_id = findParentIdById(_index, _type, _id);
						}
						
						// map each document field
						jsonObject = new JsonObject();
						for (Map.Entry<String, String> mapItem : esEntity.getFildMap().entrySet()) {
							jsonObject.addProperty(mapItem.getKey(), executeSqlMap.get(mapItem.getValue()) == null || executeSqlMap.get(mapItem.getValue()).length() == 0 ? null : executeSqlMap.get(mapItem.getValue()));
						}
						logger.info("es_delete secondary-table (update) request: _index:" + _index + "; _type:" + _type + "; _id:" + _id + "; _parent_id:" + _parent_id);
						logger.info(jsonObject.toString());
						if(_parent_id.length() > 0){
							updateResponse = client.prepareUpdate(_index, _type, _id).setParent(_parent_id).setDoc(jsonObject.toString(),XContentType.JSON).get();
						}else{
							updateResponse = client.prepareUpdate(_index, _type, _id).setDoc(jsonObject.toString(),XContentType.JSON).get();
						}
						logger.info("es_delete secondary-table (update) result: " + updateResponse.toString());
					}
				}else{
					//_id
					_id = dbFieldMap_old.get(esEntity.get_id_field());
					//_parent_id
					if(esEntity.get_parent_id_field() != null && esEntity.get_parent_id_field().length() > 0){
						_parent_id = findParentIdById(_index, _type, _id);
					}
					logger.info("es_delete操作:_index:" + _index + "; _type:" + _type + "; _id:" + _id + "; __parent_id:" + _parent_id);
					if(_parent_id.length() > 0){
						response = client.prepareDelete(_index, _type, _id).setParent(_parent_id).execute().get();
			        }else{
			        	response = client.prepareDelete(_index, _type, _id).execute().get();
			        }
					logger.info("es_delete执行结果:" + response.toString());
				}
				
		        
			}
		} catch (ExecutionException e) {
			e.printStackTrace();
		} catch (InterruptedException e) {
			e.printStackTrace();
		}
		
	}
	
	/**
	 * Build the JSON for an ES insert
	 * @param dbFieldMap_old  data before the DB change
     * @param dbFieldMap_new  data after the DB change
     * @param list  ES document mappings for this table
	 * @author tzf
	 */
	private static void esOpInsertData(Map<String,String> dbFieldMap_old,Map<String,String> dbFieldMap_new,List<EsEntity> list){
		String _index = "";
		String _type = "";
		String _id = "";
		String _parent_id = "";
		String params_field = null;
		List<Map<String, String>> executeSqlList = null;

		// iterate over every document mapping for this table
		for (EsEntity esEntity : list) {
			_index = esEntity.get_index();
			_type = esEntity.get_type();
			
			// when a SQL mapping exists, map from the SQL result set instead of the raw table columns
			if (esEntity.getSqlMap() != null) {
				params_field = esEntity.getSqlMap().get("params_field").toString();
				executeSqlList = executeSql(esEntity.getSqlMap().get("sql").toString(), dbFieldMap_new.get(params_field));
				for (Map<String, String> executeSqlMap : executeSqlList) {
					_id = executeSqlMap.get(esEntity.get_id_field());
					//parent_id
					if (esEntity.get_parent_id_field() != null && esEntity.get_parent_id_field().length() > 0 ){
						_parent_id = executeSqlMap.get(esEntity.get_parent_id_field());
						if (_parent_id == null || _parent_id.length() == 0) {
							_parent_id = "0";
						}
					}
					esInsert(_index, _type, _id, _parent_id, esEntity, executeSqlMap);
				}
			}else{
				//id
				_id = dbFieldMap_new.get(esEntity.get_id_field());
				//parent_id
				if (esEntity.get_parent_id_field() != null && esEntity.get_parent_id_field().length() > 0 ){
					_parent_id = dbFieldMap_new.get(esEntity.get_parent_id_field());
					if (_parent_id == null || _parent_id.length() == 0) {
						_parent_id = "0";
					}
				}
				esInsert(_index, _type, _id, _parent_id, esEntity, dbFieldMap_new);
			}
		}

	}

	/**
	 * ES insert
	 * @param _index ES index
	 * @param _type ES type
	 * @param _id ES document id
	 * @param _parent_id ES parent id
	 * @param esEntity ES-DB mapping
	 * @param paramMap fields to index
	 * @author tzf
	 */
	private static void esInsert(String _index, String _type, String _id, String _parent_id, EsEntity esEntity,
			Map<String, String> paramMap){
		try {
			IndexResponse response;
			// map each document field
			XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
			for (Map.Entry<String, String> mapItem : esEntity.getFildMap().entrySet()) {
				builder.field(mapItem.getKey(), paramMap.get(mapItem.getValue()) == null || paramMap.get(mapItem.getValue()).length() == 0 ? null : paramMap.get(mapItem.getValue()));
			}
			builder.endObject();

			logger.info("es_insert操作:_index:" + _index + "; _type:" + _type + "; _id:" + _id + "; __parent_id:" + _parent_id);
			logger.info(JSON.toJSONString(builder));
			if(_parent_id.length() > 0){
				response = client.prepareIndex(_index, _type, _id).setParent(_parent_id).setSource(builder).execute().get();
			}else{
				response = client.prepareIndex(_index, _type, _id).setSource(builder).execute().get();
			}
			logger.info("es_insert执行结果:" + response.toString());
		} catch (IOException e) {
			e.printStackTrace();
		} catch (InterruptedException e) {
			e.printStackTrace();
		} catch (ExecutionException e) {
			e.printStackTrace();
		}
	}
	
	/**
	 * Look up a document's parent id by its document id
	 * @param _index
	 * @param _type
	 * @param _id
	 * @return _parent_id
	 * @author tzf
	 */
	private static String findParentIdById(String _index,String _type,String _id){
		String _parent_id = "";
		SearchResponse searchResponse = client.prepareSearch(_index)
				.setTypes(_type)
				.setQuery(QueryBuilders.idsQuery().addIds(_id))
				.get();
		SearchHits hits = searchResponse.getHits();
		if(hits.getHits() != null && hits.getHits().length > 0){
			SearchHit searchHit = hits.getHits()[0];

			for (Iterator<DocumentField> iter = searchHit.iterator(); iter.hasNext();) {
				DocumentField next = iter.next();
				//System.out.println("parent id found: " + next.getName() + " -- " + next.getValue());
				if(next.getName().equals("_parent")){
					_parent_id = next.getValue();
				}
			}
		}
		
		// business rule: when the parent id is missing, fall back to "0"
		return _parent_id.length() > 0 ? _parent_id : "0";
	}
	
	/**
	 * Execute a SQL statement,
	 * turning the ES-to-table-column mapping into an ES-to-SQL-result-set mapping
	 * @author tzf
	 */
	private static List<Map<String,String>> executeSql(String sql,String id){
		ConnectionPoolUtil connectionPoolUtil = null;
		PreparedStatement pstm = null;
		Connection conn = null;
		List<Map<String,String>> results = new ArrayList<Map<String,String>>();
		try {
			connectionPoolUtil = ConnectionPoolUtil.getConnectionPoolUtil();
			conn = connectionPoolUtil.getConnection();
			pstm = conn.prepareStatement(sql);
			logger.info("sql:" + sql.replaceAll("\n", " "));
			logger.info("sql参数:" + id);
			pstm.setString(1, id);
			ResultSet rs = pstm.executeQuery();

			ResultSetMetaData rsmd = rs.getMetaData();  
			int colCount = rsmd.getColumnCount();
			List<String> colNameList=new ArrayList<String>();
			for(int i=0;i<colCount;i++){
				colNameList.add(rsmd.getColumnLabel(i+1));
			}
			Map<String, String> map = null;
			String key = "";
			String value = "";
			while(rs.next()){
				map = new HashMap<String, String>();
				for(int i=0; i<colCount; i++){
					key = colNameList.get(i);
					value = rs.getString(colNameList.get(i));
					map.put(key, value);
				}
				results.add(map);
			}
			logger.info("sql结果:" + (results == null ? 0 : results.size()));
			if (results == null || results.size() == 0) {
				//throw new Exception("sql执行的结果集数量异常!sql: "+sql.replaceAll("\n", " "));
			}
		} catch (SQLException e) {
			e.printStackTrace();
		}catch (Exception e) {
			e.printStackTrace();
		}finally {
			try {
				if (pstm != null) {
					pstm.close();
				}
				if (conn != null) {
					conn.close();
				}
			} catch (SQLException e) {
				e.printStackTrace();
			}
		}
		//System.out.println(results);
		return results;
	}


}
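
To sanity-check the ES side without canal running, you can feed elasticSearchAdapter a hand-built change map in the same shape that canalAdapter produces (tableName / fieldMap_old / fieldMap_new). A small hypothetical test class (the values are made up) could look like this:

package com.concom.yunzq.search.util;

import java.util.HashMap;
import java.util.Map;

/** Hypothetical manual test: pushes one fake "teacher" INSERT through the ES adapter. */
public class ElasticCrudManualTest {

	public static void main(String[] args) {
		// columns after the change, in the same form canalAdapter reports them
		Map<String, String> fieldMapNew = new HashMap<String, String>();
		fieldMapNew.put("id", "1");
		fieldMapNew.put("name", "teacher one");
		fieldMapNew.put("age", "30");

		Map<String, Object> dbMap = new HashMap<String, Object>();
		dbMap.put("tableName", "teacher");                        // must match a key in elastic_mapping.esMap
		dbMap.put("fieldMap_old", new HashMap<String, String>()); // empty for an INSERT
		dbMap.put("fieldMap_new", fieldMapNew);

		// same entry point that SimpleCanalClient calls for real binlog events
		ElasticCRUD.elasticSearchAdapter(dbMap, "INSERT");
	}
}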

3. Create your ES index and MySQL tables
-- database tables
CREATE TABLE `student` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) DEFAULT NULL,
  `age` int(11) DEFAULT NULL,
  `teacher_id` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=11 DEFAULT CHARSET=utf8;
CREATE TABLE `teacher` (
  `id` int(11) NOT NULL AUTO_INCREMENT,
  `name` varchar(255) DEFAULT NULL,
  `age` int(11) DEFAULT NULL,
  PRIMARY KEY (`id`)
) ENGINE=InnoDB AUTO_INCREMENT=103 DEFAULT CHARSET=utf8;

-- create the ES index
-- this mapping covers both the parent/child case and the joined (relation) case
PUT parent_search
{
    "mappings":{
        "teacher_info":{
            "properties":{
                "name":{
                    "type":"text"
                },
                "id":{
                    "type":"long"
                },
                "age":{
                    "type":"long"
                }
            }
        },
        "student_info":{
            "_routing":{
                "required":true
            },
            "_parent":{
                "type":"teacher_info"
            },
            "properties":{
                "teacher_id":{
                    "type":"long"
                },
                "name":{
                    "type":"text"
                },
                "id":{
                    "type":"long"
                },
                "age":{
                    "type":"long"
                }
            }
        },
        "relation":{
            "properties":{
                "teacherName":{
                    "type":"text"
                },
                "studentId":{
                    "type":"long"
                },
                "studentName":{
                    "type":"text"
                },
                "teacherId":{
                    "type":"long"
                },
                "createOn":{
                    "type":"text"
                },
                "sumStu":{
                    "type":"text"
                }
            }
        }
    }
}
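
If you would rather create the index from Java with the same transport client instead of an HTTP tool, something along these lines should work; this is a sketch that mirrors the client setup in ElasticCRUD and the mapping above, trimmed to two types (the relation type is added the same way):

package com.concom.yunzq.search.util;

import java.net.InetAddress;

import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.transport.client.PreBuiltTransportClient;

/** Sketch: create the parent_search index and its mappings through the transport client. */
public class CreateParentSearchIndex {

	public static void main(String[] args) throws Exception {
		Settings settings = Settings.builder()
				.put("cluster.name", "gzdt-dev") // same cluster name as in conf-ElasticSearch.properties
				.build();
		TransportClient client = new PreBuiltTransportClient(settings)
				.addTransportAddresses(new TransportAddress(InetAddress.getByName("127.0.0.1"), 9300));

		String teacherMapping = "{\"properties\":{"
				+ "\"id\":{\"type\":\"long\"},"
				+ "\"name\":{\"type\":\"text\"},"
				+ "\"age\":{\"type\":\"long\"}}}";

		// child type: routed by its parent, _parent points at teacher_info
		String studentMapping = "{\"_routing\":{\"required\":true},"
				+ "\"_parent\":{\"type\":\"teacher_info\"},"
				+ "\"properties\":{"
				+ "\"id\":{\"type\":\"long\"},"
				+ "\"name\":{\"type\":\"text\"},"
				+ "\"age\":{\"type\":\"long\"},"
				+ "\"teacher_id\":{\"type\":\"long\"}}}";

		client.admin().indices().prepareCreate("parent_search")
				.addMapping("teacher_info", teacherMapping, XContentType.JSON)
				.addMapping("student_info", studentMapping, XContentType.JSON)
				.get();

		client.close();
	}
}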

4. Start the service: run the main method of the test class SimpleCanalClient.java

If your project uses shiro.xml permission checks, you can expose an endpoint that skips authentication:

package com.concom.yunzq.search.controller;

import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.ResponseBody;

import com.concom.yunzq.search.util.SimpleCanalClient;

@Controller
@RequestMapping ( "/canalClient" )
public class ElasticSynchronousController {
	
	@RequestMapping(value = "/startCanal", method = RequestMethod.GET)
	@ResponseBody
	public String startCanal(){
		new Thread() {
			public void run() {
				try {
					SimpleCanalClient.startCanal();
				} catch (Exception ex) {
					ex.printStackTrace();
				}
			}
		}.start();
		return "canal启动成功!";
	}
	
	
	@RequestMapping ( value = "/stopCanal", method = RequestMethod.GET)
	@ResponseBody
	public String stopCanal(){
		SimpleCanalClient.stopCanal();
		return "canal关闭成功!";
	}
}

Closing words: if anything is unclear, just ask. Freeloaders, here you go; remember, next time for sure!
