C# Microservice Log Analysis and Visualization: A Full-Stack Hands-On Guide
🔧 Module 1: Log Collection with Minimal Code — Structured Logging with Serilog
1.1 Serilog Configuration and Structured Logging
// Program.cs: configure Serilog as the logging core
using Serilog;
using Serilog.Events;
using Serilog.Formatting.Compact;
using Serilog.Sinks.Elasticsearch;
var logger = new LoggerConfiguration()
    // Console output (development)
    .WriteTo.Console(outputTemplate: "[{Timestamp:HH:mm:ss} {Level}] {Message}{NewLine}{Exception}")
    // Elasticsearch output (production)
    .WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
    {
        AutoRegisterTemplate = true,
        IndexFormat = "csharp-microservice-logs-{0:yyyy.MM.dd}",
        TypeName = "_doc",
        // Surface sink failures instead of silently dropping events
        EmitEventFailure = EmitEventFailureHandling.ThrowException,
        // Emit structured JSON documents
        CustomFormatter = new CompactJsonFormatter()
    })
    // Log-level settings
    .MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
    .Enrich.FromLogContext() // lets us attach context such as a request ID
    .CreateLogger();
Log.Logger = logger;
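The configuration above assigns the static Log.Logger but does not yet hand it to the ASP.NET Core host. A minimal wiring sketch, assuming a WebApplication-style Program.cs and the Serilog.AspNetCore package:
// Program.cs (continued): route framework and application logging through Serilog
var builder = WebApplication.CreateBuilder(args);
builder.Host.UseSerilog(); // provided by Serilog.AspNetCore

var app = builder.Build();
app.UseSerilogRequestLogging(); // optional: one structured event per HTTP request
app.Run();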
1.2 Request IDs and Distributed Tracing
// Middleware.cs: inject a request ID into the log context
using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Serilog.Context;

public class RequestIdMiddleware
{
    private readonly RequestDelegate _next;
    public RequestIdMiddleware(RequestDelegate next)
    {
        _next = next;
    }
    public async Task InvokeAsync(HttpContext context)
    {
        // Generate a unique request ID (reuse the caller's header if one was sent)
        var requestId = context.Request.Headers.TryGetValue("X-Request-ID", out var incoming)
            ? incoming.ToString()
            : Guid.NewGuid().ToString();
        // Make the ID available to downstream components via the request headers
        context.Request.Headers["X-Request-ID"] = requestId;
        // Push the request ID into the log context for the rest of the pipeline
        using (LogContext.PushProperty("RequestId", requestId))
        {
            await _next(context);
        }
    }
}
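To activate the middleware, register it early in the request pipeline; a minimal sketch, assuming the WebApplication bootstrap from section 1.1:
// Program.cs: add the middleware early so every request gets an ID before it reaches MVC
app.UseMiddleware<RequestIdMiddleware>();
Everything handled after this line (controllers, minimal APIs, downstream services in the same request) will log with the same RequestId.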
// Usage example: logs automatically carry the request ID
using System;
using Microsoft.Extensions.Logging;

public class OrderService
{
    private readonly ILogger<OrderService> _logger;
    public OrderService(ILogger<OrderService> logger)
    {
        _logger = logger;
    }
    public void ProcessOrder(int orderId)
    {
        try
        {
            // RequestId is attached by the FromLogContext enricher; no need to pass it by hand
            _logger.LogInformation("Processing order {OrderId}", orderId);
            // ...business logic...
        }
        catch (Exception ex)
        {
            _logger.LogError(ex, "Error processing order {OrderId}", orderId);
        }
    }
}
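When one service calls another, the request ID should travel with the outgoing HTTP call so both services log the same RequestId. The sketch below shows one way to do that with a DelegatingHandler; the handler name and registration are illustrative, not part of the original setup:
// RequestIdPropagationHandler.cs: copy X-Request-ID onto outgoing HTTP requests
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;

public class RequestIdPropagationHandler : DelegatingHandler
{
    private readonly IHttpContextAccessor _accessor;
    public RequestIdPropagationHandler(IHttpContextAccessor accessor) => _accessor = accessor;

    protected override Task<HttpResponseMessage> SendAsync(
        HttpRequestMessage request, CancellationToken cancellationToken)
    {
        // Forward the current request's ID, if we have one, to the downstream service
        var requestId = _accessor.HttpContext?.Request.Headers["X-Request-ID"].ToString();
        if (!string.IsNullOrEmpty(requestId))
        {
            request.Headers.TryAddWithoutValidation("X-Request-ID", requestId);
        }
        return base.SendAsync(request, cancellationToken);
    }
}

// Program.cs: attach the handler to a named HttpClient (illustrative registration)
// builder.Services.AddHttpContextAccessor();
// builder.Services.AddTransient<RequestIdPropagationHandler>();
// builder.Services.AddHttpClient("inventory").AddHttpMessageHandler<RequestIdPropagationHandler>();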
1.3 Log Formatting and Sensitive-Data Filtering
// Sensitive-data filtering via a Serilog enricher: mask properties whose names suggest secrets
using System;
using System.Linq;
using Serilog.Core;
using Serilog.Events;

public class SensitiveDataMasker : ILogEventEnricher
{
    public void Enrich(LogEvent logEvent, ILogEventPropertyFactory propertyFactory)
    {
        // Replace the value of any property whose name contains "password"
        foreach (var name in logEvent.Properties.Keys
                     .Where(k => k.Contains("password", StringComparison.OrdinalIgnoreCase))
                     .ToList())
        {
            logEvent.AddOrUpdateProperty(propertyFactory.CreateProperty(name, "***FILTERED***"));
        }
    }
}
// Register the masking enricher (in Program.cs)
.Enrich.With(new SensitiveDataMasker())
🔥 Module 2: The ELK Stack in Practice — High-Volume Storage with Elasticsearch
2.1 Elasticsearch Index Template
// elasticsearch-template.json: index template definition
{
  "index_patterns": ["csharp-microservice-logs-*"],
  "settings": {
    "number_of_shards": 3,
    "number_of_replicas": 1
  },
  "mappings": {
    "properties": {
      "@timestamp": { "type": "date" },
      "message": { "type": "text" },
      "level": { "type": "keyword" },
      "requestId": { "type": "keyword" },
      "exception": { "type": "text" },
      "properties": {
        "properties": {
          "serviceName": { "type": "keyword" },
          "environment": { "type": "keyword" }
        }
      }
    }
  }
}
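AutoRegisterTemplate (module 1) can register a template for you; if you prefer to manage the JSON above yourself, here is a minimal C# sketch that uploads it through the legacy _template API (the file name, template name, and endpoint are assumptions):
// RegisterTemplate.cs: push elasticsearch-template.json to the cluster
using System;
using System.IO;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;

public static class TemplateRegistrar
{
    public static async Task RegisterAsync()
    {
        var json = await File.ReadAllTextAsync("elasticsearch-template.json");
        using var client = new HttpClient { BaseAddress = new Uri("http://localhost:9200") };
        // PUT the template so new csharp-microservice-logs-* indices pick up the mappings
        var response = await client.PutAsync(
            "_template/csharp-microservice-logs",
            new StringContent(json, Encoding.UTF8, "application/json"));
        response.EnsureSuccessStatusCode();
    }
}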
2.2 Logstash Configuration and Log Parsing
# logstash.conf: parse incoming log events
input {
  beats {
    port => 5044
  }
}
filter {
  # Parse JSON-formatted log lines
  json {
    source => "message"
  }
  # Tag production traffic
  if [environment] == "prod" {
    mutate {
      add_tag => ["production"]
    }
  }
}
output {
  elasticsearch {
    hosts => ["http://elasticsearch:9200"]
    index => "csharp-microservice-logs-%{+YYYY.MM.dd}"
  }
}
2.3 Kibana Dashboards and Alert Rules
// Create a visualization through Kibana's Saved Objects API
// (simplified payload; in a real request visState is sent as a JSON-encoded string)
POST /api/saved_objects/visualization/order_error_rate
{
  "attributes": {
    "title": "Order Error Rate",
    "visState": {
      "type": "metric",
      "params": {
        "metrics": [
          {
            "id": "1",
            "type": "count",
            "schema": "metric"
          }
        ],
        "bucket": {
          "type": "terms",
          "schema": "segment",
          "field": "level",
          "size": 5
        }
      }
    }
  }
}
🌟 Module 3: Distributed Tracing with OpenTelemetry
3.1 OpenTelemetry Integration
// Program.cs: wire up OpenTelemetry tracing
using OpenTelemetry;
using OpenTelemetry.Exporter;
using OpenTelemetry.Resources;
using OpenTelemetry.Trace;
var tracerProvider = Sdk.CreateTracerProviderBuilder()
    .SetResourceBuilder(ResourceBuilder.CreateDefault().AddService("OrderService"))
    .AddConsoleExporter() // print spans to the console
    .AddOtlpExporter(options =>
    {
        // Jaeger's OTLP gRPC endpoint (default port 4317)
        options.Endpoint = new Uri("http://jaeger:4317");
    })
    .AddSource("CSharpMicroservice")
    .Build();
// Usage example: trace an HTTP request
using System;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using OpenTelemetry.Trace;

[ApiController]
[Route("api/[controller]")]
public class OrderController : ControllerBase
{
    private readonly Tracer _tracer;
    public OrderController(Tracer tracer)
    {
        _tracer = tracer;
    }
    [HttpGet("{id}")]
    public async Task<IActionResult> GetOrder(int id)
    {
        using var span = _tracer.StartActiveSpan("GetOrder");
        span.SetAttribute("orderId", id);
        // Simulate network latency
        await Task.Delay(500);
        if (id == 404)
            span.RecordException(new Exception("Order not found"));
        return Ok($"Order {id} processed");
    }
}
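The controller above expects a Tracer from dependency injection; one way to provide it is to register a tracer for the source declared in AddSource. A minimal sketch, assuming the WebApplication bootstrap used earlier:
// Program.cs: expose a Tracer for constructor injection
// ("CSharpMicroservice" must match the name passed to AddSource above)
builder.Services.AddSingleton(tracerProvider.GetTracer("CSharpMicroservice"));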
3.2 Jaeger Visualization Configuration
# jaeger-config.yml: Jaeger collector configuration (simplified, illustrative)
server:
  healthCheck:
    path: /debug/healthz
collector:
  zipkin:
    http:
      endpoint: /api/v2/spans
query:
  metrics:
    enabled: true
🔥 Module 4: Performance Optimization — Logging Under High Concurrency
4.1 Batching and Compression
// Program.cs: batch log shipping to Elasticsearch
.WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
{
    BatchPostingLimit = 1000,   // events per bulk request
    QueueSizeLimit = 10000,     // bound on the in-memory queue
    FailureCallback = e => Console.WriteLine($"Failed to send log event: {e.MessageTemplate}"),
    // Enable GZIP compression on the HTTP connection
    ModifyConnectionSettings = c => c.EnableHttpCompression()
})
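If Elasticsearch is briefly unreachable under heavy load, the bounded in-memory queue will start dropping events. The sink can also stage events in a local buffer file as a fallback; a minimal sketch, with the path being an assumption:
// Program.cs: durable on-disk buffering in front of the Elasticsearch sink
.WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://localhost:9200"))
{
    BufferBaseFilename = "./logs/es-buffer",        // events are written here first, then shipped
    BufferFileSizeLimitBytes = 100L * 1024 * 1024   // cap the buffer at roughly 100 MB
})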
4.2 Hot-Warm-Cold Data Separation
// Elasticsearch index lifecycle policy (assumes warm nodes are tagged with node.attr.data: warm)
PUT _ilm/policy/hot-warm-cold
{
  "policy": {
    "phases": {
      "hot": {
        "min_age": "0ms",
        "actions": {
          "set_priority": { "priority": 100 }
        }
      },
      "warm": {
        "min_age": "7d",
        "actions": {
          "set_priority": { "priority": 50 },
          "allocate": { "require": { "data": "warm" } }
        }
      },
      "cold": {
        "min_age": "30d",
        "actions": {
          "freeze": {}
        }
      }
    }
  }
}
🔥 Module 5: Case Study — An End-to-End Solution for an E-Commerce System
5.1 E-Commerce Service Log Structure
{
  "@timestamp": "2025-04-20T09:11:56Z",
  "level": "INFO",
  "message": "Order processed successfully",
  "requestId": "123e4567-e89b-12d3-a456-426614174000",
  "properties": {
    "serviceName": "OrderService",
    "environment": "prod"
  },
  "userId": "user123",
  "orderItems": [
    { "productId": 1001, "quantity": 2 },
    { "productId": 1002, "quantity": 1 }
  ]
}
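A single structured log call produces a document with that shape once Serilog ships it to Elasticsearch. A sketch (class and method names are illustrative; exact field casing depends on the formatter):
// OrderAuditLogger.cs: the {@OrderItems} operator destructures the list into nested objects
using Microsoft.Extensions.Logging;

public class OrderAuditLogger
{
    private readonly ILogger<OrderAuditLogger> _logger;
    public OrderAuditLogger(ILogger<OrderAuditLogger> logger) => _logger = logger;

    public void LogOrderProcessed(string userId, object[] orderItems)
    {
        // UserId and OrderItems become top-level fields in the stored document
        _logger.LogInformation(
            "Order processed successfully for {UserId} with items {@OrderItems}",
            userId, orderItems);
    }
}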
5.2 Kibana Dashboard Configuration
// Dashboard JSON (exported from Kibana)
{
  "title": "E-Commerce Monitoring",
  "hits": 100,
  "panelsRowHeight": "medium",
  "options": {
    "useMargins": true
  },
  "panels": [
    {
      "type": "metric",
      "id": "1",
      "panelIndex": 0,
      "gridData": {
        "x": 0,
        "y": 0,
        "w": 6,
        "h": 5
      },
      "params": {
        "metrics": [
          {
            "id": "1",
            "type": "count",
            "schema": "metric"
          }
        ],
        "bucket": {
          "type": "terms",
          "schema": "segment",
          "field": "level",
          "size": 5
        }
      }
    },
    {
      "type": "time_series",
      "id": "2",
      "panelIndex": 1,
      "gridData": {
        "x": 6,
        "y": 0,
        "w": 6,
        "h": 5
      },
      "params": {
        "metrics": [
          {
            "id": "2",
            "type": "avg",
            "field": "responseTime",
            "schema": "metric"
          }
        ],
        "bucket": {
          "type": "date_histogram",
          "interval": "auto",
          "field": "@timestamp"
        }
      }
    }
  ]
}
🔑 Core Techniques: Hardening and Optimizing the Logging System
6.1 Log Security and Encryption
// Encrypt Elasticsearch traffic with TLS (HTTPS endpoint)
.WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("https://localhost:9200"))
{
    // Configure the underlying connection: compression plus basic authentication
    ModifyConnectionSettings = connection => connection
        .EnableHttpCompression()
        .BasicAuthentication("username", "password")
})
6.2 Log Backup and Disaster Recovery
# Register a snapshot repository and take a snapshot
# (restore later with POST _snapshot/my_backup/snapshot_1/_restore)
PUT _snapshot/my_backup
{
  "type": "fs",
  "settings": {
    "location": "/mnt/elasticsearch/snapshots",
    "compress": true
  }
}
PUT _snapshot/my_backup/snapshot_1?wait_for_completion=true
{
  "indices": "csharp-microservice-logs-*",
  "include_global_state": false
}
6.3 Cloud-Native Log Integration
# Deploy Filebeat as a Kubernetes DaemonSet
apiVersion: apps/v1
kind: DaemonSet
metadata:
  name: filebeat
spec:
  selector:
    matchLabels:
      app: filebeat
  template:
    metadata:
      labels:
        app: filebeat
    spec:
      containers:
      - name: filebeat
        image: docker.elastic.co/beats/filebeat:8.5.3
        args: [
          "-c", "/etc/filebeat/filebeat.yml",
          "-e"
        ]
        volumeMounts:
        - name: config
          mountPath: /etc/filebeat/filebeat.yml
          subPath: filebeat.yml
        - name: data
          mountPath: /usr/share/filebeat/data
      volumes:
      - name: config
        configMap:
          name: filebeat-config
      - name: data
        hostPath:
          # Host path for Filebeat's registry data (adjust to your environment)
          path: /var/lib/filebeat-data
          type: DirectoryOrCreate
🌐 Module 6: Cross-Platform and Cloud Service Integration
6.1 Integrating with Azure Monitor
// Ship logs to Azure Monitor through Application Insights
// (requires the Serilog.Sinks.ApplicationInsights package)
.WriteTo.ApplicationInsights(
    "your-instrumentation-key",
    TelemetryConverter.Traces)
6.2 Integrating with AWS CloudWatch
// Ship logs to AWS CloudWatch Logs
// (requires the Serilog.Sinks.AwsCloudWatch and AWSSDK.CloudWatchLogs packages)
.WriteTo.AmazonCloudWatch(
    new CloudWatchSinkOptions { LogGroupName = "CSharpMicroservices" },
    new AmazonCloudWatchLogsClient(RegionEndpoint.USWest2))
🌌 Option Comparison and Selection Guide
| Module | Technology | Performance | Best For | Extensibility |
|---|---|---|---|---|
| Log collection | Serilog + Filebeat | High | High-concurrency, distributed systems | Multi-cloud deployment |
| Log analysis | Elasticsearch + Kibana | Very high | Real-time monitoring and complex queries | Petabyte-scale data |
| Tracing | OpenTelemetry + Jaeger | Medium-high | Distributed request-chain analysis | Multi-language support |
| Storage optimization | Hot-warm-cold tiering | High | Long-term retention and cost control | Automated policy management |
| Cloud integration | Azure Monitor / AWS CloudWatch | Medium | Fast roll-out in cloud-native environments | Native cloud-service integration |
With these six core modules, you can now:
- Collect logs with minimal code: structured logging and request-ID tracking with Serilog
- Run the ELK stack end to end: efficient storage in Elasticsearch and real-time visualization in Kibana
- Trace distributed requests: cross-service request-chain analysis with OpenTelemetry
- Optimize for performance: batching, hot-warm-cold tiering, and encrypted transport
- Put it all together: a production-grade log monitoring solution for an e-commerce system
1. E-Commerce Service Logging Configuration
// Program.cs: complete logging configuration
using Serilog;
using Serilog.Events;
using Serilog.Sinks.Elasticsearch;
var logger = new LoggerConfiguration()
    .Enrich.FromLogContext()
    .Enrich.WithProperty("Environment", "prod")
    .WriteTo.Console()
    .WriteTo.Elasticsearch(new ElasticsearchSinkOptions(new Uri("http://elasticsearch:9200"))
    {
        IndexFormat = "ecommerce-logs-{0:yyyy.MM.dd}",
        AutoRegisterTemplate = true,
        BatchPostingLimit = 500,
        ModifyConnectionSettings = x => x.BasicAuthentication("user", "pass")
    })
    .MinimumLevel.Override("Microsoft", LogEventLevel.Warning)
    .CreateLogger();
Log.Logger = logger;
2. OpenTelemetry and Jaeger Integration
// Program.cs: OpenTelemetry configuration
using var tracerProvider = Sdk.CreateTracerProviderBuilder()
    .SetResourceBuilder(ResourceBuilder.CreateDefault().AddService("ECommerceService"))
    .AddSource("ECommerceApp")
    .AddJaegerExporter(options =>
    {
        // Send spans to the Jaeger collector's HTTP endpoint
        options.Protocol = JaegerExportProtocol.HttpBinaryThrift;
        options.Endpoint = new Uri("http://jaeger:14268/api/traces");
    })
    .Build();
3. Creating a Kibana Visualization via the API
// Create the "Order Error Rate" visualization through Kibana's Saved Objects API
// (simplified: in a real request, searchSourceJSON is sent as a JSON-encoded string)
POST /api/saved_objects/visualization/order_error_rate
{
  "attributes": {
    "title": "Order Error Rate",
    "type": "lens",
    "kibanaSavedObjectMeta": {
      "searchSourceJSON": {
        "index": "ecommerce-logs-*",
        "query": {
          "bool": {
            "must": [
              { "match_all": {} },
              { "range": { "@timestamp": { "gte": "now-1h" } } }
            ]
          }
        },
        "aggs": {
          "2": {
            "terms": {
              "field": "level.keyword",
              "size": 5,
              "order": { "_count": "desc" }
            }
          }
        }
      }
    }
  }
}