ElasticSearch 2.4.2

PrjElasticSearchVer2.4.2


package zengwenfeng;

import java.net.InetAddress;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class CreatIndex
{
	/**
	 * Creates the index and its mapping. Both "title" and "content" are string fields
	 * analyzed with the IK analyzer for indexing and search ("analyzer": "ik");
	 * see the IK analysis plugin documentation for details.
	 * @param indices index name
	 * @param mappingType mapping type name
	 * @throws Exception
	 */
	public static void createMapping(String indices, String mappingType) throws Exception
	{
		// Transport client pointing at a local node on the default transport port 9300
		Client client = TransportClient.builder().build()
				.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), 9300));

		// Create the index first (throws if it already exists)
		client.admin().indices().prepareCreate(indices).execute().actionGet();

		// Build the mapping: two stored, analyzed string fields using the IK analyzer
		XContentBuilder builder = XContentFactory.jsonBuilder()
				.startObject()
					.startObject(mappingType)
						.startObject("properties")
							.startObject("title")
								.field("type", "string")
								.field("store", "yes")
								.field("analyzer", "ik")
								.field("index", "analyzed")
							.endObject()
							.startObject("content")
								.field("type", "string")
								.field("store", "yes")
								.field("analyzer", "ik")
								.field("index", "analyzed")
							.endObject()
						.endObject()
					.endObject()
				.endObject();

		PutMappingRequest mapping = Requests.putMappingRequest(indices).type(mappingType).source(builder);
		client.admin().indices().putMapping(mapping).actionGet();
		client.close();
	}
}
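
If createMapping is run a second time against the same node, prepareCreate will fail because the index already exists. A minimal sketch of an existence check with the 2.x indices admin API, assuming the same client and indices variables as above:

		// Skip creation if the index is already there
		boolean exists = client.admin().indices()
				.prepareExists(indices)
				.execute().actionGet()
				.isExists();
		if (!exists)
		{
			client.admin().indices().prepareCreate(indices).execute().actionGet();
		}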
package zengwenfeng;

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

public class Test2
{
	public static void main(String[] args)
	{
		IndexResponse response = null;
		Client client = null;
		Map<String, Object> json = new HashMap<String, Object>(); // build the document source
		json.put("title", "novel");
		json.put("content", "the content");

		try
		{
			CreatIndex.createMapping("test", "text"); // create the index and the type mapping
		}
		catch (Exception e1)
		{
			e1.printStackTrace();
		}

		// client startup and indexing
		try
		{
			client = TransportClient.builder().build() // build a new transport client
					.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("127.0.0.1"), 9300));
			response = client.prepareIndex("test", "text", "1") // index the document: index, type, id
					.setSource(json).get();
		}
		catch (UnknownHostException e)
		{
			e.printStackTrace();
			return; // without a client there is nothing more to do
		}

		// The index response carries back the metadata of the stored document
		String _index = response.getIndex();    // index name
		String _type = response.getType();      // type name
		String _id = response.getId();          // document ID (generated or supplied)
		long _version = response.getVersion();  // 1 the first time this document is indexed
		boolean created = response.isCreated(); // true if newly created, false if updated
		System.out.println("index:" + _index);
		System.out.println("type:" + _type);
		System.out.println("id:" + _id);
		System.out.println("version:" + _version);
		System.out.println("created:" + created);

		// search
		QueryBuilder qb = QueryBuilders.matchQuery("content", "前后不到");

		SearchResponse searchresponse = client.prepareSearch("test") // index
				.setTypes("text")                  // type
				.setQuery(qb)                      // query
				.addHighlightedField("content")    // field to highlight
				.setHighlighterPreTags("<b>")      // tags wrapped around each match
				.setHighlighterPostTags("</b>")
				.setHighlighterFragmentSize(50)    // length of the highlighted fragment
				.execute().actionGet();

		SearchHits hits = searchresponse.getHits(); // read the hits
		if (hits.totalHits() > 0)
		{
			for (SearchHit hit : hits)
			{
				System.out.println("score:" + hit.getScore() + ":\t" + hit.getId());
				System.out.println("content:" + hit.getHighlightFields());
			}
		}
		else
		{
			System.out.println("0 results found");
		}
		client.close();
	}
}
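
Note that a freshly indexed document only becomes visible to search after the next refresh (by default once per second), so searching immediately after prepareIndex can legitimately return zero hits even when everything else is configured correctly. A minimal sketch, to be placed before the prepareSearch call above, that forces a refresh of the test index:

		// Force a refresh so the document indexed above is searchable right away
		// (otherwise the default 1s refresh interval may hide it from the query below)
		client.admin().indices().prepareRefresh("test").execute().actionGet();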


java.lang.UnsupportedClassVersionError: org/elasticsearch/common/transport/TransportAddress : Unsupported major.minor version 51.0
	at java.lang.ClassLoader.defineClass1(Native Method)
	at java.lang.ClassLoader.defineClassCond(Unknown Source)
	at java.lang.ClassLoader.defineClass(Unknown Source)
	at java.security.SecureClassLoader.defineClass(Unknown Source)
	at java.net.URLClassLoader.defineClass(Unknown Source)
	at java.net.URLClassLoader.access$000(Unknown Source)
	at java.net.URLClassLoader$1.run(Unknown Source)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(Unknown Source)
	at java.lang.ClassLoader.loadClass(Unknown Source)
	at sun.misc.Launcher$AppClassLoader.loadClass(Unknown Source)
	at java.lang.ClassLoader.loadClass(Unknown Source)
Exception in thread "main" 
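
Class file version 51.0 corresponds to Java 7, so this UnsupportedClassVersionError means the Elasticsearch 2.4.2 client jars were loaded by an older JRE (Java 6 or earlier). A quick way to confirm which runtime actually launches the client (a throwaway check, not part of the project):

public class JavaVersionCheck
{
	public static void main(String[] args)
	{
		// Prints the version of the JVM running the client code;
		// Elasticsearch 2.4.2 client jars require at least Java 7 (class file version 51.0)
		System.out.println(System.getProperty("java.version"));
	}
}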

log4j:WARN No appenders could be found for logger (org.elasticsearch.plugins).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
NoNodeAvailableException[None of the configured nodes are available: [{#transport#-1}{127.0.0.1}{127.0.0.1:9300}]]
	at org.elasticsearch.client.transport.TransportClientNodesService.ensureNodesAreAvailable(TransportClientNodesService.java:290)
	at org.elasticsearch.client.transport.TransportClientNodesService.execute(TransportClientNodesService.java:207)
	at org.elasticsearch.client.transport.support.TransportProxyClient.execute(TransportProxyClient.java:55)
	at org.elasticsearch.client.transport.TransportClient.doExecute(TransportClient.java:288)
	at org.elasticsearch.client.support.AbstractClient.execute(AbstractClient.java:359)
	at org.elasticsearch.client.support.AbstractClient$IndicesAdmin.execute(AbstractClient.java:1226)
	at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:86)
	at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:56)
	at zengwenfeng.CreatIndex.createMapping(CreatIndex.java:24)
	at zengwenfeng.Test2.main(Test2.java:34)
Exception in thread "main" NoNodeAvailableException[None of the configured nodes are available: [{#transport#-1}{127.0.0.1}{127.0.0.1:9300}]]
	at org.elasticsearch.client.transport.TransportClientNodesService.ensureNodesAreAvailable(TransportClientNodesService.java:290)
	at org.elasticsearch.client.transport.TransportClientNodesService.execute(TransportClientNodesService.java:207)
	at org.elasticsearch.client.transport.support.TransportProxyClient.execute(TransportProxyClient.java:55)
	at org.elasticsearch.client.transport.TransportClient.doExecute(TransportClient.java:288)
	at org.elasticsearch.client.support.AbstractClient.execute(AbstractClient.java:359)
	at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:86)
	at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:56)
	at org.elasticsearch.action.ActionRequestBuilder.get(ActionRequestBuilder.java:64)
	at zengwenfeng.Test2.main(Test2.java:47)
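
NoNodeAvailableException means the transport client could not complete a handshake with any node at the configured address: the node may not be reachable there, or the client's cluster.name may not match the server's. A minimal sketch of building the client with explicit settings, assuming the default cluster name "elasticsearch" (the fix in this post was ultimately to bind the node to 127.0.0.1 via network.host, see the notes further down):

import java.net.InetAddress;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.InetSocketTransportAddress;

// ...
Settings settings = Settings.settingsBuilder()
		.put("cluster.name", "elasticsearch") // must match cluster.name on the node; default shown here
		.build();
Client client = TransportClient.builder().settings(settings).build()
		.addTransportAddress(new InetSocketTransportAddress(
				InetAddress.getByName("127.0.0.1"), 9300)); // an address the node actually binds/publishes on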


[2017-07-13 20:20:38,438][WARN ][bootstrap                ] jvm uses the client vm, make sure to run `java` with the server vm for best performance by adding `-server` to the command line
[2017-07-13 20:20:40,079][INFO ][node                     ] [Kurt Wagner] version[2.4.2], pid[4056], build[161c65a/2016-11-17T11:51:03Z]
[2017-07-13 20:20:40,079][INFO ][node                     ] [Kurt Wagner] initializing ...
[2017-07-13 20:20:42,735][INFO ][plugins                  ] [Kurt Wagner] modules [lang-groovy, reindex, lang-expression], plugins [head], sites [head]
[2017-07-13 20:20:42,907][INFO ][env                      ] [Kurt Wagner] using [1] data paths, mounts [[(C:)]], net usable_space [37.3gb], net total_space [59.9gb], spins? [unknown], types [NTFS]
[2017-07-13 20:20:42,907][INFO ][env                      ] [Kurt Wagner] heap size [1011.2mb], compressed ordinary object pointers [unknown]
[2017-07-13 20:20:50,360][INFO ][node                     ] [Kurt Wagner] initialized
[2017-07-13 20:20:50,360][INFO ][node                     ] [Kurt Wagner] starting ...
[2017-07-13 20:20:51,016][INFO ][transport                ] [Kurt Wagner] publish_address {192.168.1.114:9300}, bound_addresses {0.0.0.0:9300}
[2017-07-13 20:20:51,079][INFO ][discovery                ] [Kurt Wagner] elasticsearch/TgvtoomdQea3zIQX_M_1aQ
[2017-07-13 20:20:55,173][INFO ][cluster.service          ] [Kurt Wagner] new_master {Kurt Wagner}{TgvtoomdQea3zIQX_M_1aQ}{192.168.1.114}{192.168.1.114:9300}, reason: zen-disco-join(elected_as_master, [0] joins received)
[2017-07-13 20:20:55,329][INFO ][http                     ] [Kurt Wagner] publish_address {192.168.1.114:9200}, bound_addresses {0.0.0.0:9200}
[2017-07-13 20:20:55,329][INFO ][node                     ] [Kurt Wagner] started
[2017-07-13 20:20:55,360][INFO ][gateway                  ] [Kurt Wagner] recovered [0] indices into cluster_state
[2017-07-13 20:22:20,735][WARN ][transport.netty          ] [Kurt Wagner] exception caught on transport layer [[id: 0x0c3daea6, /192.168.1.101:59094 => /192.168.1.114:9300]], closing connection
java.lang.IllegalStateException: Received message from unsupported version: [1.0.0] minimal compatible version is: [2.0.0]
	at org.elasticsearch.transport.netty.MessageChannelHandler.messageReceived(MessageChannelHandler.java:110)
	at org.jboss.netty.channel.SimpleChannelUpstreamHandler.handleUpstream(SimpleChannelUpstreamHandler.java:70)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564)
	at org.jboss.netty.channel.DefaultChannelPipeline$DefaultChannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791)
	at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:296)
	at org.jboss.netty.handler.codec.frame.FrameDecoder.unfoldAndFireMessageReceived(FrameDecoder.java:462)
	at org.jboss.netty.handler.codec.frame.FrameDecoder.callDecode(FrameDecoder.java:443)
	at org.jboss.netty.handler.codec.frame.FrameDecoder.messageReceived(FrameDecoder.java:303)
	at org.jboss.netty.channel.SimpleChannelUpstreamHandler.handleUpstream(SimpleChannelUpstreamHandler.java:70)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564)
	at org.jboss.netty.channel.DefaultChannelPipeline$DefaultChannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791)
	at org.elasticsearch.common.netty.OpenChannelsHandler.handleUpstream(OpenChannelsHandler.java:75)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:559)
	at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:268)
	at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:255)
	at org.jboss.netty.channel.socket.nio.NioWorker.read(NioWorker.java:88)
	at org.jboss.netty.channel.socket.nio.AbstractNioWorker.process(AbstractNioWorker.java:108)
	at org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337)
	at org.jboss.netty.channel.socket.nio.AbstractNioWorker.run(AbstractNioWorker.java:89)
	at org.jboss.netty.channel.socket.nio.NioWorker.run(NioWorker.java:178)
	at org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108)
	at org.jboss.netty.util.internal.DeadLockProofWorker$1.run(DeadLockProofWorker.java:42)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
[2017-07-13 20:22:20,938][WARN ][transport.netty          ] [Kurt Wagner] exception caught on transport layer [[id: 0x62cbceec, /192.168.1.101:59101 => /192.168.1.114:9300]], closing connection
java.lang.IllegalStateException: Received message from unsupported version: [1.0.0] minimal compatible version is: [2.0.0]
	at org.elasticsearch.transport.netty.MessageChannelHandler.messageReceived(MessageChannelHandler.java:110)
	at org.jboss.netty.channel.SimpleChannelUpstreamHandler.handleUpstream(SimpleChannelUpstreamHandler.java:70)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564)
	at org.jboss.netty.channel.DefaultChannelPipeline$DefaultChannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791)
	at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:296)
	at org.jboss.netty.handler.codec.frame.FrameDecoder.unfoldAndFireMessageReceived(FrameDecoder.java:462)
	at org.jboss.netty.handler.codec.frame.FrameDecoder.callDecode(FrameDecoder.java:443)
	at org.jboss.netty.handler.codec.frame.FrameDecoder.messageReceived(FrameDecoder.java:303)
	at org.jboss.netty.channel.SimpleChannelUpstreamHandler.handleUpstream(SimpleChannelUpstreamHandler.java:70)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564)
	at org.jboss.netty.channel.DefaultChannelPipeline$DefaultChannelHandlerContext.sendUpstream(DefaultChannelPipeline.java:791)
	at org.elasticsearch.common.netty.OpenChannelsHandler.handleUpstream(OpenChannelsHandler.java:75)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:564)
	at org.jboss.netty.channel.DefaultChannelPipeline.sendUpstream(DefaultChannelPipeline.java:559)
	at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:268)
	at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:255)
	at org.jboss.netty.channel.socket.nio.NioWorker.read(NioWorker.java:88)
	at org.jboss.netty.channel.socket.nio.AbstractNioWorker.process(AbstractNioWorker.java:108)
	at org.jboss.netty.channel.socket.nio.AbstractNioSelector.run(AbstractNioSelector.java:337)
	at org.jboss.netty.channel.socket.nio.AbstractNioWorker.run(AbstractNioWorker.java:89)
	at org.jboss.netty.channel.socket.nio.NioWorker.run(NioWorker.java:178)
	at org.jboss.netty.util.ThreadRenamingRunnable.run(ThreadRenamingRunnable.java:108)
	at org.jboss.netty.util.internal.DeadLockProofWorker$1.run(DeadLockProofWorker.java:42)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
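
The "Received message from unsupported version: [1.0.0]" warnings on the server side mean a 1.x client jar was still on the classpath while talking to the 2.4.2 node. A quick check of which client version is actually loaded (org.elasticsearch.Version ships in the client jar):

import org.elasticsearch.Version;

public class ClientVersionCheck
{
	public static void main(String[] args)
	{
		// Should print 2.4.2 to match the server; a 1.x value explains the
		// "unsupported version" warnings in the transport log above
		System.out.println(Version.CURRENT);
	}
}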



network.host: 127.0.0.1

Setting network.host: 127.0.0.1 in elasticsearch.yml makes the node bind and publish on localhost, matching the address the transport client connects to.
Version 2.4.2 also requires at least JDK 7; on an older JRE you get the UnsupportedClassVersionError shown above.
The client JAR is best kept at version 2.4.2 as well: using the 1.0 client earlier caused the errors above, since the node rejects messages from clients older than 2.0.


log4j:WARN No appenders could be found for logger (org.elasticsearch.plugins).
log4j:WARN Please initialize the log4j system properly.
log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.
MapperParsingException[analyzer [ik] not found for field [title]]
	at org.elasticsearch.index.mapper.core.TypeParsers.parseAnalyzersAndTermVectors(TypeParsers.java:213)
	at org.elasticsearch.index.mapper.core.TypeParsers.parseTextField(TypeParsers.java:250)
	at org.elasticsearch.index.mapper.core.StringFieldMapper$TypeParser.parse(StringFieldMapper.java:170)
	at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parseProperties(ObjectMapper.java:309)
	at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parseObjectOrDocumentTypeProperties(ObjectMapper.java:222)
	at org.elasticsearch.index.mapper.object.RootObjectMapper$TypeParser.parse(RootObjectMapper.java:139)
	at org.elasticsearch.index.mapper.DocumentMapperParser.parse(DocumentMapperParser.java:118)
	at org.elasticsearch.index.mapper.DocumentMapperParser.parse(DocumentMapperParser.java:99)
	at org.elasticsearch.index.mapper.MapperService.parse(MapperService.java:549)
	at org.elasticsearch.cluster.metadata.MetaDataMappingService$PutMappingExecutor.applyRequest(MetaDataMappingService.java:257)
	at org.elasticsearch.cluster.metadata.MetaDataMappingService$PutMappingExecutor.execute(MetaDataMappingService.java:230)
	at org.elasticsearch.cluster.service.InternalClusterService.runTasksForExecutor(InternalClusterService.java:480)
	at org.elasticsearch.cluster.service.InternalClusterService$UpdateTask.run(InternalClusterService.java:784)
	at org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor$TieBreakingPrioritizedRunnable.runAndClean(PrioritizedEsThreadPoolExecutor.java:231)
	at org.elasticsearch.common.util.concurrent.PrioritizedEsThreadPoolExecutor$TieBreakingPrioritizedRunnable.run(PrioritizedEsThreadPoolExecutor.java:194)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
index:test
type:text
id:1
version:1
created:true
0 results found
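
"MapperParsingException: analyzer [ik] not found" means the IK analysis plugin is not installed on the node, so the put-mapping call fails and the title/content fields are never analyzed with IK, which is one reason the search above finds nothing. Either install the IK plugin build that matches 2.4.2 into the node's plugins directory and restart, or, as a quick sanity check, switch the mapping to a built-in analyzer. A minimal sketch of the latter (Chinese text is then split per character rather than per word):

		// Same mapping as in CreatIndex, but with the built-in "standard" analyzer,
		// which needs no plugin: .field("analyzer", "ik") -> .field("analyzer", "standard")
		XContentBuilder builder = XContentFactory.jsonBuilder()
				.startObject().startObject(mappingType).startObject("properties")
					.startObject("title")
						.field("type", "string").field("store", "yes")
						.field("analyzer", "standard").field("index", "analyzed")
					.endObject()
					.startObject("content")
						.field("type", "string").field("store", "yes")
						.field("analyzer", "standard").field("index", "analyzed")
					.endObject()
				.endObject().endObject().endObject();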



