
ELK(Elasticsearch Logstash Kibana) + Filebeat + nginxをdocker-composeで起動

Introduction

In a previous post, I ran nginx on Docker and collected its access log into Elasticsearch via Logstash.
This time, I changed the setup so that Filebeat ships the log to Logstash.

The source code is available on GitHub.

Environment

  • docker-compose
  • elasticsearch
  • kibana
  • Logstash
  • Filebeat
  • nginx

Directory structure

└── es_logstash
    ├── es_d
    │   ├── docker-compose.yml
    │   ├── Dockerfile
    │   └── config
    │       └── elasticsearch.yml
    ├── kibana_d
    │   ├── docker-compose.yml
    │   ├── Dockerfile
    │   └── config
    │       └── kibana.yml
    ├── logstash_d
    │   ├── docker-compose.yml
    │   ├── Dockerfile
    │   └── config
    │       └── logstash.conf
    ├── beats_d
    │   ├── docker-compose.yml
    │   ├── Dockerfile
    │   └── config
    │       └── filebeat.conf
    └── nginx_d
        └── docker-compose.yml
es_d/docker-compose.yml
version: '2'
services:
  elasticsearch:
    mem_limit: 1024m
    build: .
    container_name: es_c_el
    image: es_i_el:1.0.5
    volumes:
      - ../data/es:/usr/share/elasticsearch/data
    ports:
      - 9200:9200
    environment:
      - ES_JAVA_OPTS=-Xms512m -Xmx512m
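
Once the Elasticsearch container is up (startup steps are at the end of this article), a quick sanity check is to hit the REST port; the response should report version 6.2.3:

$ curl http://localhost:9200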
es_d/Dockerfile
FROM docker.elastic.co/elasticsearch/elasticsearch:6.2.3

COPY ./config/elasticsearch.yml /usr/share/elasticsearch/config/elasticsearch.yml

# Install the kuromoji plugin for Japanese text analysis
RUN elasticsearch-plugin install analysis-kuromoji

# RUN elasticsearch-plugin remove x-pack
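
After startup, you can verify the plugin with the _analyze API; this minimal check uses the kuromoji analyzer that the plugin provides:

$ curl -XPOST 'http://localhost:9200/_analyze?pretty' -H 'Content-Type: application/json' -d '{"analyzer": "kuromoji", "text": "関西国際空港"}'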
es_d/config/elasticsearch.yml
http.host: 0.0.0.0

cluster.name: "docker-cluster"
discovery.type: single-node

### x-pack functions
xpack.security.enabled: false
# Free usage covers monitoring for one cluster only
xpack.monitoring.enabled: true
xpack.graph.enabled: false
xpack.watcher.enabled: false
kibana_d/docker-compose.yml
version: '2'
services:
  kibana:
    mem_limit: 512m
    build: .
    container_name: kibana_c_el
    image: kibana_i_el:1.0.4
    external_links:
      - elasticsearch
    ports:
      - 5601:5601
    networks:
      - default
      - es1_default

networks:
  es1_default:
    external:
      name: es_d_default
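
Kibana joins the external es_d_default network so that the elasticsearch host name resolves; the network is created when es_d comes up, so start es_d first. You can confirm it exists with:

$ docker network ls | grep es_d_default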
kibana_d/Dockerfile
FROM docker.elastic.co/kibana/kibana:6.2.3

COPY ./config/kibana.yml /usr/share/kibana/config/kibana.yml
# RUN kibana-plugin remove x-pack
kibana_d/config/kibana.yml
server.name: kibana
server.host: "0"
elasticsearch.url: http://elasticsearch:9200
elasticsearch.username: elastic
elasticsearch.password: changeme
# xpack.monitoring.ui.container.elasticsearch.enabled: true
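
Once running, Kibana's status API is a quick way to confirm it can reach Elasticsearch:

$ curl http://localhost:5601/api/status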
logstash_d/docker-compose.yml
version: '2'
services:
  logstash:
    mem_limit: 512m
    build: .
    container_name: logstash_c_el
    image: logstash_i_el:1.0.21
    external_links:
      - elasticsearch
    ports:
      - 5044:5044
    networks:
      - default
      - es1_default

networks:
  es1_default:
    external:
      name: es_d_default
logstash_d/Dockerfile
FROM docker.elastic.co/logstash/logstash:6.2.3

COPY ./config/logstash.conf /usr/share/logstash/pipeline/logstash.conf
logstash_d/config/logstash.conf
input {
  beats {
    port => 5044
  }
}

filter {
  grok {
    match => { "message" => ["%{IPORHOST:[nginx][access][remote_ip]} - %{DATA:[nginx][access][user_name]} \[%{HTTPDATE:[nginx][access][time]}\] \"%{WORD:[nginx][access][method]} %{DATA:[nginx][access][url]} HTTP/%{NUMBER:[nginx][access][http_version]}\" %{NUMBER:[nginx][access][response_code]} %{NUMBER:[nginx][access][body_sent][bytes]} \"%{DATA:[nginx][access][referrer]}\" \"%{DATA:[nginx][access][agent]}\""] }
    remove_field => "message"
  }
  mutate {
    add_field => { "read_timestamp" => "%{@timestamp}" }
  }
  date {
    match => [ "[nginx][access][time]", "dd/MMM/YYYY:H:m:s Z" ]
    remove_field => "[nginx][access][time]"
  }
  useragent {
    source => "[nginx][access][agent]"
    target => "[nginx][access][user_agent]"
    remove_field => "[nginx][access][agent]"
  }
  geoip {
    source => "[nginx][access][remote_ip]"
    target => "[nginx][access][geoip]"
  }
}

output {
  elasticsearch {
    hosts => [ 'elasticsearch' ]
    index => "access_log1"
  }
}
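
The pipeline syntax can be validated without restarting the stack; assuming the official image layout (logstash on the PATH, config under /usr/share/logstash/pipeline), a one-off test run looks like:

$ docker exec logstash_c_el logstash --config.test_and_exit -f /usr/share/logstash/pipeline/logstash.conf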
beats_d/docker-compose.yml
version: '2'
services:
  beats:
    mem_limit: 512m
    build: .
    container_name: beats_c_el
    image: beats_i_el:1.0.1
    volumes:
      - ../data/nginx:/var/log/nginx/
    external_links:
      - logstash
    networks:
      - default
      - logstash1_default

networks:
  logstash1_default:
    external:
      name: logstash_d_default
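
The directory layout above lists a beats_d/Dockerfile whose contents are not shown here; a minimal sketch that follows the pattern of the other services (assuming the official Filebeat image's default config path) would be:

beats_d/Dockerfile
FROM docker.elastic.co/beats/filebeat:6.2.3

# Copy our config to the path Filebeat reads by default
COPY ./config/filebeat.conf /usr/share/filebeat/filebeat.yml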
beats_d/config/filebeat.conf
filebeat.prospectors:
- type: log
  paths:
    - /var/log/nginx/access.log

output.logstash:
  hosts: ["logstash:5044"]
nginx_d/docker-compose.yml
version: '2'
services:
  web:
    mem_limit: 512m
    image: nginx:1.10
    ports:
      - "80:80"
    volumes:
      - ../data/nginx:/var/log/nginx

Verification

Start the containers

Bring up the containers in this order: elasticsearch, Logstash, Kibana, Filebeat, and finally nginx (later services join networks created by the earlier ones), as shown below.

$ docker-compose up -d
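
Each service has its own docker-compose.yml, so run the command once per directory:

$ (cd es_d && docker-compose up -d)
$ (cd logstash_d && docker-compose up -d)
$ (cd kibana_d && docker-compose up -d)
$ (cd beats_d && docker-compose up -d)
$ (cd nginx_d && docker-compose up -d)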

Access nginx

$ curl http://localhost

access.log is updated, and the log entries are shipped to Elasticsearch via Filebeat and Logstash.
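
To generate a few more log entries, hit nginx repeatedly:

$ for i in $(seq 1 10); do curl -s http://localhost > /dev/null; done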

elasticsearch

$ curl -XGET 'http://localhost:9200/_cat/count/access_log1'

The number of times nginx was accessed is returned.
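
Adding the v parameter includes the _cat column headers (epoch, timestamp, count), which makes the output easier to read:

$ curl -XGET 'http://localhost:9200/_cat/count/access_log1?v'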

kibana

Open Kibana at http://localhost:5601 and run GET /access_log1/_search?pretty=true (in Dev Tools) to inspect the stored documents.
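
The same search can be issued directly against Elasticsearch with curl:

$ curl -XGET 'http://localhost:9200/access_log1/_search?pretty=true'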

Closing thoughts

Elasticsearch is a deep topic.

github
