sudo yum install nginx -y
sudo service nginx start
curl -i http://localhost
sudo chmod 755 /var/log/nginx
# the directory needs the execute bit so Logstash/Filebeat can read the log files inside
sudo chown -R ec2-user:ec2-user /usr/share/nginx/html
echo "<h1>Hello World</h1>" > /usr/share/nginx/html/hello.html
sudo yum remove java-1.7.0-openjdk.x86_64 -y
sudo yum install java-1.8.0-openjdk-devel.x86_64 -y
ulimit -a
sudo vi /etc/security/limits.conf
ec2-user hard nofile 65536
ec2-user soft nofile 65536
ec2-user hard nproc 65536
ec2-user soft nproc 65536
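The limits.conf changes apply after logging out and back in; you can verify them with standard shell builtins:
ulimit -n
ulimit -u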
sudo vi /etc/rc.local
echo 1048575 > /proc/sys/vm/max_map_count
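The rc.local entry only takes effect on the next boot; to apply the same value immediately, a standard sysctl call (not in the original steps) also works:
sudo sysctl -w vm.max_map_count=1048575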
sudo sysctl -w fs.file-max=65536
cat /proc/sys/fs/file-max
sudo reboot
Elasticsearch
Kibana
Logstash (can be replaced with Fluentd)
It is best to keep the component versions matched, but the latest versions also work together (as of 2016/04/03).
Elasticsearch and Kibana should be kept at the recommended matching versions.
Recommended install location: ~/local/ or /opt/
mkdir ~/local
cd ~/local
wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-5.6.4.tar.gz
tar xvfz elasticsearch-5.6.4.tar.gz
ln -s elasticsearch-5.6.4 elasticsearch
cd elasticsearch
bin/elasticsearch -d
# run as a daemon (in the background); optional
# without -d it runs only while the terminal session stays connected
curl -i http://localhost:9200/
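Optionally, check cluster health as well (standard Elasticsearch API):
curl 'http://localhost:9200/_cluster/health?pretty'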
cd ~/local
wget https://artifacts.elastic.co/downloads/kibana/kibana-5.6.4-linux-x86_64.tar.gz
tar xvfz kibana-5.6.4-linux-x86_64.tar.gz
ln -s kibana-5.6.4-linux-x86_64 kibana
cd kibana
bin/kibana
# background run
nohup bin/kibana &
curl localhost:5601
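For a machine-readable check, Kibana 5.x also exposes a status endpoint:
curl 'http://localhost:5601/api/status'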
cd ~/local
wget https://artifacts.elastic.co/downloads/logstash/logstash-5.6.4.tar.gz
tar xvfz logstash-5.6.4.tar.gz
ln -s logstash-5.6.4 logstash
cd logstash
mkdir logconf
vi logconf/nginx.conf
input {
  file {
    path => "/var/log/nginx/access.log"
    start_position => "beginning"
  }
}
filter {
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}" }
  }
  geoip {
    source => "clientip"
  }
}
output {
  elasticsearch {}
}
# test
bin/logstash -f logconf/nginx.conf -t
# run
bin/logstash -f logconf/nginx.conf
# background run
nohup bin/logstash -f logconf/nginx.conf &
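To confirm Logstash is writing to Elasticsearch, list the indices (standard _cat API); a logstash-YYYY.MM.dd index should appear once access.log has entries:
curl 'http://localhost:9200/_cat/indices?v'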
cd ~/local/logstash
./bin/logstash-plugin install logstash-input-beats
cd ~/local
wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-5.6.4-linux-x86_64.tar.gz
tar xvfz filebeat-5.6.4-linux-x86_64.tar.gz
ln -s filebeat-5.6.4-linux-x86_64 filebeat
cd filebeat
# in filebeat.yml, comment out the elasticsearch output section with #:
#output.elasticsearch:
  #hosts: ["localhost:9200"]
# and uncomment the logstash output section:
output.logstash:
  hosts: ["localhost:5044"]
# also change the log path in filebeat.yml to `/var/log/nginx/*.log`
# add a beats input to the Logstash config (a fuller pipeline sketch follows below):
input {
  beats {
    port => 5044
  }
}
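For reference, a minimal end-to-end Beats pipeline could be kept in its own file, say logconf/beats.conf (a placeholder name), combining this input with the grok filter and Elasticsearch output shown later in this guide:
input {
  beats {
    port => 5044
  }
}
filter {
  grok {
    match => { "message" => "%{COMBINEDAPACHELOG}" }
  }
}
output {
  elasticsearch {
    hosts => "localhost:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
# run it
bin/logstash -f logconf/beats.conf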
./filebeat -e -c filebeat.yml
echo "nohup ./filebeat -e -c filebeat.yml &" > start.sh
chmod +x start.sh
./start.sh
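Because start.sh runs Filebeat under nohup, its output lands in nohup.out; tail it to confirm shipping started:
tail -f nohup.out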
filter {
  mutate {
    add_field => {
      "reqs" => "%{request}"
    }
  }
}
filter {
  mutate {
    split => ["reqs", "?"]
    add_field => { "uri" => "%{[reqs][0]}" }
    add_field => { "req_uri" => "%{[reqs][0]}" }
    # add_field => { "querystring" => "%{[reqs][1]}" }
  }
}
mutate {
  remove_field => [
    "reqs",
    "uri"
  ]
}
filter {
  mutate {
    add_field => {
      "tmp" => "%{request}"
    }
  }
  if [tmp] =~ "\?" {
    mutate {
      split => [
        "tmp", "?"
      ]
      add_field => {
        "params" => "%{[tmp][1]}"
      }
    }
    kv {
      field_split => "&"
      source => "params"
      include_keys => [ "category", "utm_source" ]
      prefix => "param_"
    }
  }
}
# params
if [request] =~ "\?" {
  kv {
    field_split => "&"
    source => "querystring"
    include_keys => [ "query", "redirectUrl" ]
    prefix => "param_"
  }
}
filter {
  if [message] =~ "^#|\.(css|js|ico|png|xml|jpg|JPG|gif|jpeg|eot|htc\?) " {
    drop {}
  }
}
if [agent] =~ "Mediapartners" {
  drop {}
}
if [device] == "Spider" {
  drop {}
}
useragent {
  source => "agent"
}
date {
  match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
}
urldecode {
  field => "params"
}
mutate {
  convert => [ "bytes", "integer" ]
}
filter {
  grok {
    match => [
      "message", "%{COMBINEDAPACHELOG}",
      "message", "%{COMMONAPACHELOG}"
    ]
  }
}
output {
  elasticsearch {
    hosts => "localhost:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
mutate {
  gsub => [ 'message', '\\x22', '']
  gsub => [ 'message', '"@fields": {', '']
  gsub => [ 'message', '} }', '}']
  gsub => [ 'message', '@', '']
}
curl -XPUT http://localhost:9200/my_index/ -d '
{
  "mappings" : {
    "logs" : {
      "properties" : {
        "location" : { "type" : "geo_point" }
      }
    }
  }
}'
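The mapping can be verified with a standard GET on the mapping API:
curl 'http://localhost:9200/my_index/_mapping?pretty'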
filter {
  csv {
    columns => ["lv", "region_addr", "latitude", "longitude", "cnt"]
  }
  mutate {
    convert => { "longitude" => "float" }
    convert => { "latitude" => "float" }
    add_field => ["location", "%{longitude}"]
    add_field => ["location", "%{latitude}"]
  }
  mutate {
    convert => [ "location", "float" ]
  }
}
lv,region_addr,latitude,longitude,cnt
1,Gangwon,37.88532579,127.729829,7
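A minimal sketch of how such a CSV could be loaded, assuming the filter above is saved as logconf/geo.conf together with a stdin input and an Elasticsearch output targeting my_index (geo.conf and geo.csv are placeholder names):
input { stdin {} }
# ... csv/mutate filter shown above ...
output {
  elasticsearch {
    hosts => "localhost:9200"
    index => "my_index"
    document_type => "logs"
  }
}
# run it, piping the CSV through stdin
bin/logstash -f logconf/geo.conf < geo.csv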
curl -XDELETE http://localhost:9200/logstash*
sudo yum install httpd-tools -y
sudo htpasswd -c /etc/nginx/htpasswd.users kibanaadmin
sudo htpasswd /etc/nginx/htpasswd.users kenuheo
sudo vi /etc/nginx/nginx.conf
Set server_name, then add basic auth to the Kibana proxy settings below:
auth_basic "Restricted Access";
auth_basic_user_file /etc/nginx/htpasswd.users;
location / {
  sendfile off;
  proxy_pass http://127.0.0.1:5601;
  proxy_redirect default;
  proxy_http_version 1.1;
  proxy_set_header Host $host;
  proxy_set_header X-Real-IP $remote_addr;
  proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
  proxy_set_header X-Forwarded-Proto $scheme;
  proxy_cache_bypass $http_upgrade;
  proxy_max_temp_file_size 0;
}
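Before restarting, it is worth validating the edited configuration:
sudo nginx -t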
sudo service nginx restart
npm install -g pm2
cd ~/local/kibana
pm2 start bin/cli
pm2 list
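Optionally, pm2 can bring Kibana back after a reboot (standard pm2 commands; pm2 startup prints a command to run, typically with sudo):
pm2 save
pm2 startup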
Logstash grok patterns
Building a log-analysis environment with ELKR (ElasticSearch + Logstash + Kibana + Redis)
Building a Seoul Metro dashboard with the ELK stack (2016, recommended)
EMOCON 2015 F/W: Building a Seoul Metro dashboard using the ELK stack
Building ELK, part 1 - LOGSTASH
[Ubuntu] Installing and testing ELK
The ELK Stack as a Splunk replacement solution
How To Install Elasticsearch, Logstash, and Kibana 4 on Ubuntu 14.04
ELK programming broadcast video
Logstash Configuration
Elasticsearch(Lucene) Query Syntax
How to use ELK Kibana
okdevtv.conf