https://hub.docker.com/_/elasticsearch/
https://www.docker.elastic.co/r/elasticsearch
https://hub.docker.com/r/sebp/elk/
# Run Elasticsearch and Kibana containers (both pinned to 6.8.23).
# Elasticsearch: HTTP on 9200, transport on 9300; config and log dirs bind-mounted.
docker run -d -p 9200:9200 -p 9300:9300 --name elasticsearch --restart=always -v /etc/localtime:/etc/localtime:ro -v /data/site/docker/env/monitor/elasticsearch/elasticsearch-6.ini:/usr/share/elasticsearch/config/elasticsearch.yml:ro -v /data/file/logs/elasticsearch:/usr/share/elasticsearch/logs elasticsearch:6.8.23
# Kibana: UI on 5601; the zh-cn dir (translation files) is mounted at /opt.
docker run -d -p 5601:5601 --name kibana --restart=always -v /etc/localtime:/etc/localtime:ro -v /data/site/docker/env/monitor/kibana/kibana-6.ini:/usr/share/kibana/config/kibana.yml:ro -v /data/docker/monitor/kibana/zh-cn:/opt kibana:6.8.23
# Clear old logs (avoids startup errors after an upgrade/restart).
rm -rf /data/file/logs/elasticsearch/*
# Edit the Elasticsearch config the container actually bind-mounts.
# NOTE(review): the docker run above mounts
# /data/site/docker/env/monitor/elasticsearch/elasticsearch-6.ini; the original
# note edited /data/docker/monitor/... which the container never reads —
# confirm there is no symlink between the two before relying on this.
vim /data/site/docker/env/monitor/elasticsearch/elasticsearch-6.ini
cluster.name: cluster
node.name: 01
# Listen on all interfaces (container is only reachable via published ports).
network.host: 0.0.0.0
discovery.zen.ping.unicast.hosts: 127.0.0.1
http.port: 9200
# Enable CORS so browser-based tools can query the REST API.
http.cors.enabled: true
http.cors.allow-origin: "*"
# Edit the Kibana config the container actually bind-mounts (must match the
# -v source path in the docker run above).
vim /data/site/docker/env/monitor/kibana/kibana-6.ini
server.host: "0.0.0.0"
# Fixed malformed URL: the port goes directly after the host
# (original had "https://abc.htmltoo.com/:9200").
# NOTE(review): the ES container above serves plain HTTP on 9200 — confirm a
# TLS-terminating proxy sits in front before using https here.
elasticsearch.hosts: "https://abc.htmltoo.com:9200"
kibana.index: ".kibana"
i18n.locale: "zh-CN"
# NOTE(review): encryption keys are hard-coded in this note — move them to a
# secrets store rather than committing them.
xpack.security.encryptionKey: "98XVHeRM9KqGpghV2YXHiS4K1b4ZdULn"
xpack.reporting.encryptionKey: "RrqAFUSwBztgdzsKgHnFyREv04mUyWOF"
# Install the IK Chinese-analysis plugin:
# https://github.com/medcl/elasticsearch-analysis-ik/releases
# The plugin version MUST exactly match the Elasticsearch version, or
# elasticsearch-plugin refuses to install it. The container above runs
# 6.8.23, so install v6.8.23 (the original note used 6.8.12 — verify the
# v6.8.23 release asset exists before running).
bin/elasticsearch-plugin install https://github.com/medcl/elasticsearch-analysis-ik/releases/download/v6.8.23/elasticsearch-analysis-ik-6.8.23.zip
# Smoke-test the IK analyzer:
curl -XGET -H 'Content-Type: application/json' 'http://localhost:9200/_analyze?pretty' -d '{
"analyzer" : "ik_max_word",
"text": "中华人民共和国国歌"
}'
# Quick check that Elasticsearch is answering.
curl localhost:9200
# kibana:6.8.12 — Chinese localization via Kibana_Hanization.
# (The kibana.yml above already sets i18n.locale: "zh-CN"; this project is an
# additional translation pack.)
# On the host: clone the translations into the dir mounted at /opt.
cd /data/docker/monitor/kibana/zh-cn
git clone https://github.com/anbai-inc/Kibana_Hanization.git
# Inside the container: copy the translations into Kibana's source tree.
cd /opt/Kibana_Hanization
cp -a translations /usr/share/kibana/src/legacy/core_plugins/kibana/
==========Time settings===========
1. Kibana Advanced Settings:
-> Date format -> YYYY-MM-DD HH:mm:ss
-> dateFormat:tz -> Asia/Shanghai
-> dateFormat:dow -> Monday
2.filter {
mutate {
# presumably escapes literal "\x" sequences in message so downstream JSON
# parsing doesn't choke — verify against the actual pipeline input.
gsub => ["message", "\\x", "\\\x"]
# Turn the "+" separator in the time field into "T" (ISO 8601 shape).
gsub => [
"time", "[+]", "T"
]
# Re-append the +08:00 timezone offset removed by the gsub above.
replace => ["time","%{time}+08:00"]
}
.....
=====================
# ===== Filebeat 6.8.12 =====
# Install the Elasticsearch ingest plugins Filebeat modules rely on
# (GeoIP enrichment and user-agent parsing) — run inside the ES container.
bin/elasticsearch-plugin install ingest-geoip
bin/elasticsearch-plugin install ingest-user-agent
# Download and install Filebeat on the host being monitored.
# cd /data/site/go/htmltoo.ssh/tools/soft/src/common/monitor/
# wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-6.8.12-x86_64.rpm
cd /opt
# Fixed malformed URL: port goes directly after the host
# (original had "https://abc.htmltoo.com/:7777/...").
wget https://abc.htmltoo.com:7777/src/common/monitor/filebeat-6.8.12-x86_64.rpm
rpm -vi filebeat-6.8.12-x86_64.rpm
vim /etc/filebeat/filebeat.yml
#=========================== Filebeat inputs =============================
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /usr/local/nginx/logs/*.log
    - /opt/logs/*.log
    - /mnt/html_tomcat8/logs/catalina.out
    - /mnt/wx_beyoungsew_tomcat8/logs/catalina.out
    - /mnt/mer1_tomcat8/logs/catalina.out
    - /mnt/wx2_juvv_tomcat8/logs/catalina.out
  # Place decoded JSON keys at the document root (default false).
  # FIX: the option is "keys_under_root" — the original had
  # "json.key_under_root", which Filebeat does not recognize, so JSON keys
  # were never lifted to the root.
  json.keys_under_root: true
  # On key collisions, decoded JSON values overwrite the existing keys.
  json.overwrite_keys: true
  # Extra fields attached to every event.
  fields:
    ip: 106.15.90.54
  # If true, the extra fields are stored at the top level of the document.
  fields_under_root: true
  # Tags, used for filtering downstream.
  #tags: ["nginx"]
  # Ignore log content modified longer ago than this (e.g. 2h, 5m).
  ignore_older: 96h
  # Close the file handle if the file has not been read within this period.
  close_inactive: 48h
  # Regex supported; include_lines runs before exclude_lines.
  #include_lines: ['^ERR', '^WARN']
  include_lines: [".*ERR.*",".*WARN.*",".*err.*",".*warn.*"]
  # Regex supported; drop matching lines (multiline events are merged into a
  # single line before filtering).
  # exclude_lines: ['^INFO']
  exclude_lines: [".*info.*",".*INFO.*"]
#==================== Processors ====================
# Drop fields that are not needed in the index.
# NOTE(review): per the Filebeat docs, drop_fields can never remove
# "@timestamp" — listing it here has no effect; confirm intent.
processors:
- drop_fields:
    fields: ["@timestamp", "prospector.type", "input.type", "beat.hostname", "beat.version", "beat.name", "host.name", "offset"]
#==================== Outputs ====================
output.elasticsearch:
  hosts: ["https://ssh.domsn.com:9200"]
  #username: "elastic"
  #password: "changeme"
setup.kibana:
  host: "https://ssh.domsn.com:5601"
# --- Manage the Filebeat service (SysV init wrapper) ---
service filebeat start
service filebeat stop
service filebeat restart
service filebeat status