#logstash
/etc/logstash/csv.conf
input {
file {
mode => "tail"
# Watch every CSV dropped into /home/work/.
path => ["/home/work/*.csv"]
# FIX: sincedb_path must be ONE concrete, writable file — not a glob.
# The original value "/home/work/sincedb/*_log.csv" contained a literal
# '*', which Logstash would treat as part of the filename; read offsets
# for all watched files are stored together in this single sincedb.
sincedb_path => "/home/work/sincedb/csv_log.sincedb"
# Read pre-existing files from the top on first run (only applies while
# no sincedb entry exists for a file yet).
start_position => "beginning"
codec => plain {
charset => "UTF-8"
}
}
}
filter {
# Parse each CSV line into the three named fields.
csv {
columns => ["Date", "ID", "contents"]
# Cast ID from string to integer so it maps as a numeric field in ES.
convert => {
"ID" => "integer"
}
# Drop the header row; works because it matches the explicit `columns`
# values above.
skip_header => true
}
# Use the event's own "Date" field as @timestamp (replaces ingest time).
# NOTE(review): assumes timestamps carry no timezone suffix — Logstash
# will interpret them in the host's local zone; confirm that is intended.
date {
match => ["Date", "yyyy-MM-dd HH:mm:ss"]
}
}
output {
# Index parsed events into the local Elasticsearch under index "log".
elasticsearch {
hosts => "localhost:9200"
index => "log"
}
# Also echo each event to the console for debugging; remove or disable
# in production to avoid noisy logs.
stdout {
codec => rubydebug
}
}
###実行
mkdir -p /home/work/sincedb
/usr/share/logstash/bin/logstash -f /etc/logstash/csv.conf
###indexが作成されているか確認
curl -X GET http://localhost:9200/_cat/indices?v
###kibanaで可視化
・kibanaでインデックスパターンを作成
・ログの設定: メニュー > ログ > 設定 > ログインデックス > log* を追加
#参考
https://aslead.nri.co.jp/column/how-to-ingest-data-into-elasticsearch.html
https://www.casleyconsulting.co.jp/blog/engineer/253/