
Deploying Filebeat with Docker to ship logs into Elasticsearch through an ingest pipeline

filebeat.yml

filebeat.inputs:
- type: log                # Default is log: read each line from log files; stdin reads from standard input
  paths:
    - /root/logs/*/*.log
  multiline.pattern: '^\d{4}\-\d{2}\-\d{2}'   # Regex that marks the first line of an event
  multiline.negate: true   # Whether to negate the pattern match; default false
  multiline.match: after   # Whether continuation lines are appended after or before the matching line; may be after or before
  fields:
    index: 'server_log'

setup.ilm.enabled: false
setup.template.name: "java_logback_service_index_template"
setup.template.pattern: "java_logback_service_index_template-*"
setup.template.overwrite: true
setup.template.settings:
  index.number_of_shards: 1

output.elasticsearch:
  hosts: ["110.238.107.151:9001"]
  indices:
    - index: "server_log-%{+yyyy.MM.dd}"
      when.contains:
        fields:
          index: "server_log"
  pipeline: "test_java_log_pipeline"
  document_type: log   # This value is added to the type field; for Elasticsearch output the input-time type is stored; default is log
  max_retries: 3       # Number of retries against Elasticsearch, default 3; after that the current event is dropped

processors:
  - drop_fields:
      fields: ["log","host","input","agent","ecs","start_time"]
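With this multiline configuration, any line that does not start with a yyyy-MM-dd date is treated as a continuation and appended to the preceding line, so multi-line output such as a Java stack trace is shipped as a single event. An illustrative (made-up) pair of lines that Filebeat would combine into one event:

2024-10-02 21:11:20.083 xiaoye-scheduler ERROR scheduling-1 [] com.xiaoye.orion.scheduler.service.SchedulerService:83 task failed
java.lang.IllegalStateException: something went wrong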

docker run

docker run -itd \
  --privileged=true \
  --user=root \
  --name=filebeat \
  --restart always \
  --network=my_network \
  -v /root/filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml:rw \
  -v /root/filebeat/data/:/usr/share/filebeat/data/:rw \
  -v /root/logs/:/root/logs/:rw \
  -v /root/xiaoye_worker/logs/:/root/xiaoye_worker/logs/:rw \
  docker.elastic.co/beats/filebeat:6.4.2
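Once the container is running, docker logs -f filebeat is a quick way to confirm that the mounted filebeat.yml was parsed and that Filebeat can reach the Elasticsearch host configured above; configuration and connection errors show up there at startup.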

Elasticsearch template and pipeline setup

GET _template/java_logback_service_index_template
DELETE _template/java_logback_service_index_template
# Create the index template
PUT _template/java_logback_service_index_template
{"order": 1,"index_patterns": ["java_log-*"],"settings": {"number_of_shards": 1,"number_of_replicas": 1},"mappings": {"type_name":{"properties": {"app_name": {"type": "keyword"},"trance_id": {"type": "keyword"},"log_level": {"type": "keyword"},"thread": {"type": "keyword"},"class_line": {"type": "keyword"},"message": {"type": "text","analyzer": "ik_max_word","search_analyzer": "ik_smart","norms": false},"timestamp": {"type": "date"}}}},"aliases": {}
}GET _ingest/pipeline/test_java_log_pipelineDELETE /_ingest/pipeline/test_java_log_pipeline
# Create the custom ingest pipeline
PUT /_ingest/pipeline/test_java_log_pipeline
{"description": "test_java_log_pipeline","processors": [{"grok": {"field": "message","patterns": ["""%{TIMESTAMP_ISO8601:timestamp} %{DATA:app_name} %{LOGLEVEL:log_level} %{DATA:thread} \[%{DATA:trance_id}\] %{DATA:class_line} %{GREEDYDATA:message}"""],"pattern_definitions": {"ALL_CODE": "(\n)*"}},"remove": {"field": "@timestamp"}},{"date": {"field": "timestamp","formats": ["yyyy-MM-dd HH:mm:ss.SSS"],"timezone": "Asia/Shanghai","target_field": "timestamp"},"remove": {"field": "@timestamp"}}]
}
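After the template and pipeline are in place and Filebeat is shipping logs, a quick sanity check is to pull one document back from the daily index (named server_log-yyyy.MM.dd by the filebeat.yml output above) and verify that _source contains the extracted fields app_name, log_level, thread, trance_id, class_line and the rewritten message. The request below is only an illustrative check added here, not part of the original setup:

GET server_log-*/_search
{
  "size": 1
}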

Testing grok

# Test the grok pattern
POST _ingest/pipeline/_simulate
{"pipeline": {"description": "timestamp pipeline","processors": [{"grok": {"field": "message","patterns": ["""%{TIMESTAMP_ISO8601:timestamp} %{DATA:app_name} %{LOGLEVEL:log_level} %{DATA:thread} \[%{DATA:trance_id}\] %{DATA:class_line} %{GREEDYDATA:message}"""]}},{"date": {"field": "timestamp","formats": ["yyyy-MM-dd HH:mm:ss.SSS"],"timezone": "Asia/Shanghai","target_field": "create_time"},"remove": {"field": "timestamp"}}]},"docs": [{"_index": "syne_sys_log","_id": "id","_source": {"message": "2024-10-02 21:11:20.083 xiaoye-scheduler INFO  scheduling-1 [] com.xiaoye.orion.scheduler.service.SchedulerService:83 now: 2024-10-02T21:11:20.083, size: 0, id:1727874680011, startTime:1727874680011"}}]
}
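If the pattern matches the sample line, the grok processor should extract app_name (xiaoye-scheduler), log_level (INFO), thread, an empty trance_id, class_line (com.xiaoye.orion.scheduler.service.SchedulerService:83) and the trailing message text, and the date processor should turn the leading timestamp into create_time; a line that does not match instead produces a grok parse failure in the simulate response.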

Entity

@Data
@SuperBuilder
@NoArgsConstructor
@AllArgsConstructor
@JsonInclude(JsonInclude.Include.NON_EMPTY)
@JsonIgnoreProperties(ignoreUnknown = true)
@Document(indexName = "server_log-*")
public class ServerLogEs {

    @Field(name = "log_level", type = FieldType.Keyword)
    @JsonProperty("log_level")
    private String logLevel;

    @Field(name = "thread", type = FieldType.Keyword)
    private String thread;

    @Field(name = "app_name", type = FieldType.Keyword)
    @JsonProperty("app_name")
    private String appName;

    @Field(name = "trance_id", type = FieldType.Keyword)
    @JsonProperty("trance_id")
    private String tranceId;

    @Field(name = "class_line", type = FieldType.Keyword)
    @JsonProperty("class_line")
    private String classLine;

    @Field(type = FieldType.Text, analyzer = "ik_smart", searchAnalyzer = "ik_smart")
    private String message;

    @Field(name = "@create_time")
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss", timezone = "GMT+8")
    @DateTimeFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    private Date createTime;
}
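To read these documents back from Spring, a repository over the entity is enough for simple queries. The sketch below is not from the original post: the repository name and query method are illustrative assumptions, and it assumes spring-data-elasticsearch is on the classpath and that ServerLogEs also carries a String @Id property (not shown in the excerpt above).

import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.elasticsearch.repository.ElasticsearchRepository;

// Illustrative repository; assumes ServerLogEs has a String @Id property.
public interface ServerLogEsRepository extends ElasticsearchRepository<ServerLogEs, String> {

    // Derived query on the log_level keyword field, e.g.
    // repository.findByLogLevel("ERROR", PageRequest.of(0, 20));
    Page<ServerLogEs> findByLogLevel(String logLevel, Pageable pageable);
}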

logback.xml

<?xml version="1.0" encoding="UTF-8" ?>
<configuration>
    <springProperty scope="context" name="logPath" source="log.path" defaultValue="/root/logs/${APP_NAME}/"/>
    <!-- Get the service name -->
    <springProperty scope="context" name="APP_NAME" source="spring.application.name" defaultValue="xiaoye-admin"/>
    <include resource="org/springframework/boot/logging/logback/defaults.xml"/>
    <jmxConfigurator/>

    <appender name="consoleLog" class="ch.qos.logback.core.ConsoleAppender">
        <layout class="ch.qos.logback.classic.PatternLayout">
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n</pattern>
        </layout>
    </appender>

    <!-- Optionally write to file as needed -->
    <appender name="fileInfoLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.LevelFilter">
            <level>ERROR</level>
            <onMatch>DENY</onMatch>
            <onMismatch>ACCEPT</onMismatch>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n</pattern>
            <immediateFlush>false</immediateFlush>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${logPath}/info.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>7</maxHistory>
        </rollingPolicy>
    </appender>

    <appender name="fileErrorLog" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>ERROR</level>
        </filter>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n</pattern>
        </encoder>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${logPath}/error.%d{yyyy-MM-dd}.log</fileNamePattern>
            <maxHistory>7</maxHistory>
        </rollingPolicy>
    </appender>

    <logger name="org.springframework" level="ERROR"/>
    <logger name="org.xnio" level="ERROR"/>
    <logger name="io.undertow" level="ERROR"/>
    <logger name="com.netflix.discovery" level="ERROR"/>

    <!--    <springProfile name="dev,test">-->
    <!--        <root level="info">-->
    <!--            <appender-ref ref="consoleLog"/>-->
    <!--            <appender-ref ref="fileInfoLog"/>-->
    <!--            <appender-ref ref="fileErrorLog"/>-->
    <!--        </root>-->
    <!--    </springProfile>-->

    <!-- Asynchronous logging: for high log volumes it noticeably improves performance, at the cost of some delay -->
    <!--    <appender name="asyncLog" class="ch.qos.logback.classic.AsyncAppender">-->
    <!--        <discardingThreshold>0</discardingThreshold>-->
    <!--        <queueSize>100</queueSize>-->
    <!--        <appender-ref ref="fileInfoLog"/>-->
    <!--    </appender>-->
    <!--    <springProfile name="prod">-->
    <!--        <root level="info">-->
    <!--            <appender-ref ref="asyncLog"/>-->
    <!--            <appender-ref ref="fileErrorLog"/>-->
    <!--        </root>-->
    <!--    </springProfile>-->

    <root level="info">
        <appender-ref ref="consoleLog"/>
        <appender-ref ref="fileInfoLog"/>
        <appender-ref ref="fileErrorLog"/>
    </root>
</configuration>
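Note that the appender pattern %d{yyyy-MM-dd HH:mm:ss.SSS} ${APP_NAME} %-5level %thread [%X{traceId}] %class:%line %msg%n writes fields in the same order the grok expression above expects: timestamp, application name, level, thread, trace id in brackets, class:line, then the message, so the ingest pipeline can parse every line these appenders produce.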

