现象
elasticsearch升级版本后突现报错:
[2023-07-27T12:57:55,904][WARN ][o.e.x.m.e.l.LocalExporter] [node-2] unexpected error while indexing monitoring document
org.elasticsearch.xpack.monitoring.exporter.ExportException: org.elasticsearch.index.mapper.DocumentParsingException: [-1:12800] failed to parse: Limit of total fields [1000] has been exceeded while adding new fields [612]
at org.elasticsearch.xpack.monitoring.exporter.local.LocalBulk.lambda$throwExportException$2(LocalBulk.java:128) ~[?:?]
at java.util.stream.ReferencePipeline$3$1.accept(ReferencePipeline.java:197) ~[?:?]
at java.util.stream.ReferencePipeline$2$1.accept(ReferencePipeline.java:179) ~[?:?]
at java.util.Spliterators$ArraySpliterator.forEachRemaining(Spliterators.java:1006) ~[?:?]
at java.util.stream.AbstractPipeline.copyInto(AbstractPipeline.java:509) ~[?:?]
at java.util.stream.AbstractPipeline.wrapAndCopyInto(AbstractPipeline.java:499) ~[?:?]
at java.util.stream.ForEachOps$ForEachOp.evaluateSequential(ForEachOps.java:150) ~[?:?]
at java.util.stream.ForEachOps$ForEachOp$OfRef.evaluateSequential(ForEachOps.java:173) ~[?:?]
at java.util.stream.AbstractPipeline.evaluate(AbstractPipeline.java:234) ~[?:?]
at java.util.stream.ReferencePipeline.forEach(ReferencePipeline.java:596) ~[?:?]
at org.elasticsearch.xpack.monitoring.exporter.local.LocalBulk.throwExportException(LocalBulk.java:129) ~[?:?]
at org.elasticsearch.xpack.monitoring.exporter.local.LocalBulk.lambda$doFlush$0(LocalBulk.java:110) ~[?:?]
at org.elasticsearch.action.ActionListener$2.onResponse(ActionListener.java:169) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.support.ContextPreservingActionListener.onResponse(ContextPreservingActionListener.java:32) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.client.internal.node.NodeClient$SafelyWrappedActionListener.onResponse(NodeClient.java:160) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.tasks.TaskManager$1.onResponse(TaskManager.java:205) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.tasks.TaskManager$1.onResponse(TaskManager.java:199) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.support.ContextPreservingActionListener.onResponse(ContextPreservingActionListener.java:32) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$applyInternal$2(SecurityActionFilter.java:165) ~[?:?]
at org.elasticsearch.action.ActionListenerImplementations$DelegatingFailureActionListener.onResponse(ActionListenerImplementations.java:152) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.ActionListenerImplementations$RunBeforeActionListener.onResponse(ActionListenerImplementations.java:235) ~[elasticsearch-8.9.0.jar:?]
Caused by: org.elasticsearch.index.mapper.DocumentParsingException: [-1:12800] failed to parse: Limit of total fields [1000] has been exceeded while adding new fields [612]
at org.elasticsearch.index.mapper.DocumentParser.wrapInDocumentParsingException(DocumentParser.java:215) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.index.mapper.DocumentParser.internalParseDocument(DocumentParser.java:122) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.index.mapper.DocumentParser.parseDocument(DocumentParser.java:72) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.index.mapper.DocumentMapper.parse(DocumentMapper.java:78) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.index.shard.IndexShard.prepareIndex(IndexShard.java:1008) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.index.shard.IndexShard.applyIndexOperation(IndexShard.java:956) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.index.shard.IndexShard.applyIndexOperationOnPrimary(IndexShard.java:900) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.bulk.TransportShardBulkAction.executeBulkItemRequest(TransportShardBulkAction.java:354) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.bulk.TransportShardBulkAction$2.doRun(TransportShardBulkAction.java:219) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.bulk.TransportShardBulkAction.performOnPrimary(TransportShardBulkAction.java:286) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.bulk.TransportShardBulkAction.dispatchedShardOperationOnPrimary(TransportShardBulkAction.java:137) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.bulk.TransportShardBulkAction.dispatchedShardOperationOnPrimary(TransportShardBulkAction.java:74) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.support.replication.TransportWriteAction$1.doRun(TransportWriteAction.java:215) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.common.util.concurrent.TimedRunnable.doRun(TimedRunnable.java:33) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.common.util.concurrent.ThreadContext$ContextPreservingAbstractRunnable.doRun(ThreadContext.java:983) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.common.util.concurrent.AbstractRunnable.run(AbstractRunnable.java:26) ~[elasticsearch-8.9.0.jar:?]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1144) ~[?:?]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:642) ~[?:?]
at java.lang.Thread.run(Thread.java:1623) ~[?:?]
[2023-07-27T12:57:55,909][WARN ][o.e.x.m.MonitoringService] [node-2] monitoring execution failed
org.elasticsearch.xpack.monitoring.exporter.ExportException: failed to flush export bulks
at org.elasticsearch.xpack.monitoring.exporter.ExportBulk$Compound.lambda$doFlush$0(ExportBulk.java:110) ~[?:?]
at org.elasticsearch.action.ActionListenerImplementations.safeAcceptException(ActionListenerImplementations.java:60) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.ActionListener$2.onFailure(ActionListener.java:177) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.xpack.monitoring.exporter.local.LocalBulk.throwExportException(LocalBulk.java:135) ~[?:?]
at org.elasticsearch.xpack.monitoring.exporter.local.LocalBulk.lambda$doFlush$0(LocalBulk.java:110) ~[?:?]
at org.elasticsearch.action.ActionListener$2.onResponse(ActionListener.java:169) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.support.ContextPreservingActionListener.onResponse(ContextPreservingActionListener.java:32) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.client.internal.node.NodeClient$SafelyWrappedActionListener.onResponse(NodeClient.java:160) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.tasks.TaskManager$1.onResponse(TaskManager.java:205) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.tasks.TaskManager$1.onResponse(TaskManager.java:199) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.action.support.ContextPreservingActionListener.onResponse(ContextPreservingActionListener.java:32) ~[elasticsearch-8.9.0.jar:?]
at org.elasticsearch.xpack.security.action.filter.SecurityActionFilter.lambda$applyInternal$2(SecurityActionFilter.java:165) ~[?:?]
at org.elasticsearch.action.ActionListenerImplementations$DelegatingFailureActionListener.onResponse(ActionListenerImplementations.java:152) ~[elasticsearch-8.9.0.jar:?]
很明显是 field 数量限制设小了，需要增大。但问题是报错信息中没有指明是哪个 index，而网上的解决办法都要求指定具体的 index。
把 Google 都翻烂了，最终在这里找到了答案（原文地址）。
试了一下，确实有效。对所有 .monitoring-* 索引批量调大该限制即可：
PUT .monitoring-*/_settings
{
"index.mapping.total_fields.limit": 2000
}