add 完成 ELK 整合

2.X
疯狂的狮子li 3 years ago
parent 93e52ae6ac
commit fae0df1905

@ -43,7 +43,7 @@
| 分布式搜索引擎(未完成) | ElasticSearch | [ElasticSearch官网](https://www.elastic.co/cn/elasticsearch/) | 业界知名 |
| 分布式数据同步(未完成) | Alibaba Canal | [Alibaba Canal官网](https://github.com/alibaba/canal/wiki) | 采集数据同步各种数据库 ES Redis Mysql |
| 分布式链路追踪(未完成) | Apache SkyWalking | [Apache SkyWalking文档](https://skywalking.apache.org/docs/) | 链路追踪、网格分析、度量聚合、可视化 |
| 分布式日志中心(未完成) | ELK | [ElasticSearch官网](https://www.elastic.co/cn/elasticsearch/) | ELK业界成熟解决方案 |
| 分布式日志中心 | ELK | [ElasticSearch官网](https://www.elastic.co/cn/elasticsearch/) | ELK业界成熟解决方案 |
| 分布式锁 | Lock4j | [Lock4j官网](https://gitee.com/baomidou/lock4j) | 注解锁、工具锁 多种多样 |
| 分布式幂等 | Redisson | [Lock4j文档](https://gitee.com/baomidou/lock4j) | 拦截重复提交 |
| 分布式任务调度 | Xxl-Job | [Xxl-Job官网](https://www.xuxueli.com/xxl-job/) | 高性能 高可靠 易扩展 |

@ -261,3 +261,53 @@ services:
- /docker/ruoyi-resource/logs/:/ruoyi/resource/logs
privileged: true
network_mode: "host"
#################################################################################################
######################### Optional extensions — deploy only as needed ##########################
#################################################################################################
elasticsearch:
  image: elasticsearch:7.17.2
  container_name: elk_elasticsearch
  # NOTE: under network_mode "host" Docker ignores `ports:` mappings — these
  # entries are informational only; ES listens on 9200/9300 on the host directly.
  ports:
    - "9200:9200"
    - "9300:9300"
  environment:
    # Cluster name
    cluster.name: elasticsearch
    # Start as a single node (skips multi-node discovery bootstrap)
    discovery.type: single-node
    # JVM heap size — raise for real workloads
    ES_JAVA_OPTS: "-Xms512m -Xmx512m"
  volumes:
    - /docker/elk/elasticsearch/plugins:/usr/share/elasticsearch/plugins
    # data dir must be writable by the container user (see the accompanying note)
    - /docker/elk/elasticsearch/data:/usr/share/elasticsearch/data
  network_mode: "host"
kibana:
  image: kibana:7.17.2
  container_name: elk_kibana
  # informational only under host networking (see note above on elasticsearch)
  ports:
    - "5601:5601"
  depends_on:
    # start Kibana after Elasticsearch
    - elasticsearch
  environment:
    # Address of the Elasticsearch node.
    # FIX: Kibana 7.x reads ELASTICSEARCH_HOSTS; ELASTICSEARCH_URL is the
    # legacy 6.x setting and is ignored by the 7.17 image.
    ELASTICSEARCH_HOSTS: "http://127.0.0.1:9200"
    # UI language: Chinese
    I18N_LOCALE: zh-CN
    # Public base URL when served behind a reverse proxy / custom domain
    # SERVER_PUBLICBASEURL: https://kibana.cloud.com
  network_mode: "host"
logstash:
  image: logstash:7.17.2
  container_name: elk_logstash
  # informational only under host networking
  ports:
    - "4560:4560"
  volumes:
    # pipeline definition (tcp json_lines input -> elasticsearch output)
    - /docker/elk/logstash/logstash.conf:/usr/share/logstash/pipeline/logstash.conf
  depends_on:
    - elasticsearch
  network_mode: "host"

@ -0,0 +1 @@
ES 数据目录:请执行 `chmod 777 /docker/elk/elasticsearch/data` 赋予读写权限,否则 ES 将无法写入数据。(注意:777 权限过宽,生产环境建议改为将目录属主调整为容器内用户,例如 `chown -R 1000:1000 /docker/elk/elasticsearch/data`)

@ -0,0 +1 @@
ES 插件目录 扩展插件可以放入此目录下

@ -0,0 +1,14 @@
# Logstash pipeline: receive JSON log events over TCP and index them into Elasticsearch.
input {
# TCP server on all interfaces, port 4560 — logback's LogstashTcpSocketAppender
# (configured in logback-logstash.xml) connects here
tcp {
mode => "server"
host => "0.0.0.0"
port => 4560
# one JSON document per newline-delimited line
codec => json_lines
}
}
output {
elasticsearch {
hosts => "127.0.0.1:9200"
# one index per application per day; [spring.application.name] is the flat
# top-level field (dotted name) added by the logback encoder's customFields
index => "%{[spring.application.name]}-%{+YYYY.MM.dd}"
}
}

@ -38,6 +38,7 @@
<knife4j-aggregation.version>2.0.9</knife4j-aggregation.version>
<knife4j.version>3.0.3</knife4j.version>
<satoken.version>1.30.0</satoken.version>
<logstash.version>7.1.1</logstash.version>
<!-- 统一 guava 版本 解决隐式漏洞问题 -->
<guava.version>30.0-jre</guava.version>
@ -66,6 +67,7 @@
<nacos.server>127.0.0.1:8848</nacos.server>
<nacos.discovery.group>DEFAULT_GROUP</nacos.discovery.group>
<nacos.config.group>DEFAULT_GROUP</nacos.config.group>
<logstash.address>127.0.0.1:4560</logstash.address>
</properties>
<activation>
<!-- 默认环境 -->
@ -79,6 +81,7 @@
<nacos.server>127.0.0.1:8848</nacos.server>
<nacos.discovery.group>DEFAULT_GROUP</nacos.discovery.group>
<nacos.config.group>DEFAULT_GROUP</nacos.config.group>
<logstash.address>127.0.0.1:4560</logstash.address>
</properties>
</profile>
</profiles>
@ -240,6 +243,13 @@
<version>${xxl-job.version}</version>
</dependency>
<!-- logstash -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>${logstash.version}</version>
</dependency>
<dependency>
<groupId>com.squareup.okhttp3</groupId>
<artifactId>okhttp</artifactId>

@ -71,6 +71,12 @@
<!-- <dependency>-->
<!-- <groupId>com.ruoyi</groupId>-->
<!-- <artifactId>ruoyi-common-loadbalancer</artifactId>-->
<!-- </dependency>-->
<!-- ELK 日志收集 -->
<!-- <dependency>-->
<!-- <groupId>com.ruoyi</groupId>-->
<!-- <artifactId>ruoyi-common-logstash</artifactId>-->
<!-- </dependency>-->
</dependencies>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -29,6 +29,7 @@
<module>ruoyi-common-idempotent</module>
<module>ruoyi-common-mail</module>
<module>ruoyi-common-sms</module>
<module>ruoyi-common-logstash</module>
</modules>
<artifactId>ruoyi-common</artifactId>

@ -132,6 +132,12 @@
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.ruoyi</groupId>
<artifactId>ruoyi-common-logstash</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</dependencyManagement>
</project>

@ -0,0 +1,24 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Maven module ruoyi-common-logstash: bundles logstash-logback-encoder so that
     services can ship logs to Logstash by depending on this single artifact. -->
<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xmlns="http://maven.apache.org/POM/4.0.0"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<parent>
<groupId>com.ruoyi</groupId>
<artifactId>ruoyi-common</artifactId>
<version>1.0.0</version>
</parent>
<modelVersion>4.0.0</modelVersion>
<artifactId>ruoyi-common-logstash</artifactId>
<description>
ruoyi-common-logstash logstash日志推送模块
</description>
<dependencies>
<!-- version supplied by the root pom's dependencyManagement (${logstash.version}) -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
</dependency>
</dependencies>
</project>

@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8"?>
<!-- Shared logback fragment: ships log events to Logstash over TCP.
     Pulled into each service's logback config via <include resource="logback-logstash.xml"/>. -->
<included>
<!-- application name resolved from Spring's environment -->
<springProperty scope="context" name="appName" source="spring.application.name"/>
<!-- Appender that writes JSON-encoded events to Logstash -->
<appender name="logstash" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<!-- host:port of the reachable Logstash TCP input.
     NOTE(review): ${logstash.address} is presumably substituted by Maven resource
     filtering from the <logstash.address> profile property — confirm filtering is
     enabled for logback files, otherwise logback sees the literal placeholder. -->
<destination>${logstash.address}</destination>
<encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">
<!-- flat custom field; consumed by logstash.conf's index pattern -->
<customFields>{"spring.application.name":"${appName}"}</customFields>
</encoder>
</appender>
<root level="info">
<appender-ref ref="logstash"/>
</root>
</included>

@ -108,6 +108,12 @@
<!-- <artifactId>ruoyi-common-loadbalancer</artifactId>-->
<!-- </dependency>-->
<!-- ELK 日志收集 -->
<!-- <dependency>-->
<!-- <groupId>com.ruoyi</groupId>-->
<!-- <artifactId>ruoyi-common-logstash</artifactId>-->
<!-- </dependency>-->
</dependencies>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -27,6 +27,12 @@
<!-- <dependency>-->
<!-- <groupId>com.ruoyi</groupId>-->
<!-- <artifactId>ruoyi-common-loadbalancer</artifactId>-->
<!-- </dependency>-->
<!-- ELK 日志收集 -->
<!-- <dependency>-->
<!-- <groupId>com.ruoyi</groupId>-->
<!-- <artifactId>ruoyi-common-logstash</artifactId>-->
<!-- </dependency>-->
</dependencies>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console" />

@ -23,4 +23,12 @@
ruoyi-visual图形化管理模块
</description>
<dependencies>
<!-- ELK 日志收集 -->
<!-- <dependency>-->
<!-- <groupId>com.ruoyi</groupId>-->
<!-- <artifactId>ruoyi-common-logstash</artifactId>-->
<!-- </dependency>-->
</dependencies>
</project>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console" />

@ -29,8 +29,6 @@
<properties>
<seata.version>1.5.1</seata.version>
<jcommander.version>1.72</jcommander.version>
<logstash-logback-encoder.version>6.5</logstash-logback-encoder.version>
<kafka-appender.version>0.2.0-RC2</kafka-appender.version>
</properties>
<dependencyManagement>
@ -151,17 +149,7 @@
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
</dependency>
<!-- logback appenders -->
<dependency>
<groupId>net.logstash.logback</groupId>
<artifactId>logstash-logback-encoder</artifactId>
<version>${logstash-logback-encoder.version}</version>
</dependency>
<dependency>
<groupId>com.github.danielwegener</groupId>
<artifactId>logback-kafka-appender</artifactId>
<version>${kafka-appender.version}</version>
</dependency>
</dependencies>
<build>

@ -121,22 +121,12 @@
<appender-ref ref="file_error"/>
</appender>
<!-- logstash-appender: off by default -->
<!--<include resource="logback/logstash-appender.xml"/>-->
<!-- kafka-appender: off by default -->
<!--<include resource="logback/kafka-appender.xml"/>-->
<include resource="logback-logstash.xml" />
<root level="INFO">
<appender-ref ref="console"/>
<appender-ref ref="async_info"/>
<appender-ref ref="async_error"/>
<appender-ref ref="file_console"/>
<!-- logstash-appender: off by default -->
<!--<appender-ref ref="LOGSTASH"/>-->
<!-- kafka-appender: off by default -->
<!--<appender-ref ref="KAFKA"/>-->
</root>
</configuration>

@ -1,34 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
<!-- kafka-appender properties -->
<springProperty name="KAFKA_BOOTSTRAP_SERVERS" source="logging.extend.kafka-appender.bootstrap-servers"
defaultValue="127.0.0.1:9092"/>
<springProperty name="KAFKA_TOPIC" source="logging.extend.kafka-appender.topic"
defaultValue="logback_to_logstash"/>
<appender name="KAFKA" class="com.github.danielwegener.logback.kafka.KafkaAppender">
<encoder>
<!--<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS}|%p|${APPLICATION_NAME:-seata-server}|${RPC_PORT:-0}|%t|%logger|%X{X-TX-XID:-}|%X{X-TX-BRANCH-ID:-}|%m|%wex</pattern>-->
<pattern>{
"@timestamp": "%d{yyyy-MM-dd HH:mm:ss.SSS}",
"level":"%p",
"app_name":"${APPLICATION_NAME:-seata-server}",
"PORT": ${RPC_PORT:-0},
"thread_name": "%t",
"logger_name": "%logger",
"X-TX-XID": "%X{X-TX-XID:-}",
"X-TX-BRANCH-ID": "%X{X-TX-BRANCH-ID:-}",
"message": "%m",
"stack_trace": "%wex"
}
</pattern>
</encoder>
<topic>${KAFKA_TOPIC}</topic>
<keyingStrategy class="com.github.danielwegener.logback.kafka.keying.NoKeyKeyingStrategy"/>
<deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy"/>
<producerConfig>bootstrap.servers=${KAFKA_BOOTSTRAP_SERVERS}</producerConfig>
<producerConfig>acks=0</producerConfig>
<producerConfig>linger.ms=1000</producerConfig>
<producerConfig>max.block.ms=0</producerConfig>
</appender>
</included>

@ -1,29 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<included>
<!-- logstash-appender properties -->
<springProperty name="LOGSTASH_DESTINATION" source="logging.extend.logstash-appender.destination"
defaultValue="127.0.0.1:4560"/>
<appender name="LOGSTASH" class="net.logstash.logback.appender.LogstashTcpSocketAppender">
<!-- the TCP address of the logstash -->
<destination>${LOGSTASH_DESTINATION}</destination>
<!--<encoder charset="UTF-8" class="net.logstash.logback.encoder.LogstashEncoder">-->
<encoder charset="UTF-8" class="io.seata.server.logging.logback.appender.EnhancedLogstashEncoder">
<!-- the global custom fields -->
<customFields>
{
"app_name": "${APPLICATION_NAME:-seata-server}"
}
</customFields>
<!-- Exclude the provider of data `@version` -->
<excludeProvider>net.logstash.logback.composite.LogstashVersionJsonProvider</excludeProvider>
<!-- Exclude providers that are not currently needed, reduce some performance loss. -->
<excludeProvider>net.logstash.logback.composite.loggingevent.JsonMessageJsonProvider</excludeProvider>
<excludeProvider>net.logstash.logback.composite.loggingevent.TagsJsonProvider</excludeProvider>
<excludeProvider>net.logstash.logback.composite.loggingevent.LogstashMarkersJsonProvider</excludeProvider>
<excludeProvider>net.logstash.logback.composite.loggingevent.ArgumentsJsonProvider</excludeProvider>
</encoder>
</appender>
</included>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

@ -99,6 +99,8 @@
<appender-ref ref="file_error"/>
</appender>
<include resource="logback-logstash.xml" />
<!--系统操作日志-->
<root level="info">
<appender-ref ref="console"/>

Loading…
Cancel
Save