ELK

Advantages

Prerequisites

All-in-one installation

curl -L https://okdevtv.com/md/elk/elastic-setup.sh | sh
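
If piping a remote script straight into a shell feels risky, you can download it first, review it, and then run it; same URL as above:

curl -L -o elastic-setup.sh https://okdevtv.com/md/elk/elastic-setup.sh
less elastic-setup.sh   # review what it installs
sh elastic-setup.sh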

Install nginx (for sample logs)

sudo yum install nginx -y
sudo service nginx start
curl -i http://localhost
sudo chmod 755 /var/log/nginx        # allow non-root users to traverse the log directory
sudo chmod 644 /var/log/nginx/*.log  # and read the log files
sudo chown -R ec2-user:ec2-user /usr/share/nginx/html
echo "<h1>Hello World</h1>" > /usr/share/nginx/html/hello.html

jdk 1.8

sudo yum remove java-1.7.0-openjdk.x86_64 -y
sudo yum install java-1.8.0-openjdk-devel.x86_64 -y
which javac
readlink -f `which javac`
export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-1.8.0.201.b09-0.43.amzn1.x86_64
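
The exact JDK path varies with the installed build; to persist JAVA_HOME across logins it can be derived from javac instead of hard-coded (a sketch, assuming bash and the OpenJDK layout above):

echo "export JAVA_HOME=$(readlink -f $(which javac) | sed 's:/bin/javac::')" >> ~/.bash_profile
source ~/.bash_profile
echo $JAVA_HOME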

system env

ulimit -a   # current resource limits

set         # current shell environment

sudo vi /etc/security/limits.conf
# add the following lines
ec2-user hard nofile 65536
ec2-user soft nofile 65536
ec2-user hard nproc 65536
ec2-user soft nproc 65536
sudo vi /etc/rc.local
# add the following line so it is applied at every boot
echo 1048575 > /proc/sys/vm/max_map_count
sudo sysctl -w fs.file-max=65536
cat /proc/sys/fs/file-max
sudo reboot
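
After the reboot, confirm that the limits took effect. Note that sysctl -w is not persistent; to keep fs.file-max across reboots it also needs an entry in /etc/sysctl.conf (a sketch, values matching the settings above):

ulimit -n   # expect 65536 (nofile)
ulimit -u   # expect 65536 (nproc)
cat /proc/sys/vm/max_map_count
echo "fs.file-max = 65536" | sudo tee -a /etc/sysctl.conf
sudo sysctl -p
cat /proc/sys/fs/file-max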

AWS ํฌํŠธ ์„ค์ •

Installation

Install Elasticsearch

mkdir ~/local
cd ~/local
wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.8.1.tar.gz
tar xvfz elasticsearch-6.8.1.tar.gz
ln -s elasticsearch-6.8.1 elasticsearch
cd elasticsearch
bin/elasticsearch -d
  # run as a daemon (background); without -d it runs only while the terminal session is open
curl -i http://localhost:9200/
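
If the curl above returns the version banner, the node is running; cluster health is a quick sanity check (yellow is normal for a single node):

curl 'http://localhost:9200/_cluster/health?pretty'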

Install Kibana

cd ~/local
wget https://artifacts.elastic.co/downloads/kibana/kibana-6.8.1-linux-x86_64.tar.gz
tar xvfz kibana-6.8.1-linux-x86_64.tar.gz
ln -s kibana-6.8.1-linux-x86_64 kibana
cd kibana
bin/kibana
# background run
nohup bin/kibana &
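
Kibana takes a moment to start; once it is up it answers on port 5601 (a local install is assumed here):

tail nohup.out   # startup log when run via nohup
curl -i http://localhost:5601/api/status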

Install Logstash

cd ~/local
wget https://artifacts.elastic.co/downloads/logstash/logstash-6.8.1.tar.gz
tar xvfz logstash-6.8.1.tar.gz
ln -s logstash-6.8.1 logstash
cd logstash
mkdir logconf
vi logconf/nginx.conf
input {
    file {
        path => "/var/log/nginx/access.log"
        start_position => "beginning"
    }
}
filter {
    grok {
        match => { "message" => "%{COMBINEDAPACHELOG}"}
    }
    geoip {
        source => "clientip"
    }
}
output {
    elasticsearch {}
}
# test
bin/logstash -f logconf/nginx.conf -t
# run
bin/logstash -f logconf/nginx.conf
# background run
nohup bin/logstash -f logconf/nginx.conf &
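
Once Logstash is running and nginx has received some traffic, documents should appear in daily logstash-* indices (assuming the default index naming of the elasticsearch output):

curl 'http://localhost:9200/_cat/indices/logstash-*?v'
curl 'http://localhost:9200/logstash-*/_search?size=1&pretty'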
# list the registered index templates
curl 'http://localhost:9200/_template'
# remove the default logstash template before registering a custom one
curl -XDELETE 'http://localhost:9200/_template/logstash'

curl -XPUT 'localhost:9200/_template/logstash?pretty' -H 'Content-Type: application/json' -d'
{
  "template" : "logstash-*",
  "version" : 60001,
  "settings" : {
    "index.refresh_interval" : "5s"
  },
  "mappings" : {
    "_default_" : {
      "dynamic_templates" : [ {
        "message_field" : {
          "path_match" : "message",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text",
            "norms" : false
          }
        }
      }, {
        "string_fields" : {
          "match" : "*",
          "match_mapping_type" : "string",
          "mapping" : {
            "type" : "text", "norms" : false,
            "fields" : {
              "keyword" : { "type": "keyword", "ignore_above": 256 }
            }
          }
        }
      } ],
      "properties" : {
        "@timestamp": { "type": "date"},
        "@version": { "type": "keyword"},
        "geoip"  : {
          "dynamic": true,
          "properties" : {
            "ip": { "type": "ip" },
            "location" : { "type" : "geo_point" },
            "latitude" : { "type" : "half_float" },
            "longitude" : { "type" : "half_float" }
          }
        }
      }
    }
  }
}
'
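
The template only applies to indices created after it is registered, so existing logstash-* indices may need to be deleted and re-indexed for the geoip mapping to take effect. To confirm it is in place:

curl 'http://localhost:9200/_template/logstash?pretty'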

Filebeat

Filebeat with Logstash

cd ~/local/logstash
./bin/logstash-plugin install logstash-input-beats
cd ~/local
wget https://artifacts.elastic.co/downloads/beats/filebeat/filebeat-6.8.1-linux-x86_64.tar.gz
tar xvfz filebeat-6.8.1-linux-x86_64.tar.gz
ln -s filebeat-6.8.1-linux-x86_64 filebeat
cd filebeat
# comment out the elasticsearch output section with #
  # output.elasticsearch:
    #hosts: ["localhost:9200"]
# uncomment the logstash output section
  output.logstash:
    hosts: ["localhost:5044"]

# in filebeat.yml, change the log path to `/var/log/nginx/*.log`
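
Putting those edits together, the relevant parts of filebeat.yml look roughly like this (a sketch for Filebeat 6.x; the paths and hosts follow the assumptions above):

filebeat.inputs:
- type: log
  enabled: true
  paths:
    - /var/log/nginx/*.log

#output.elasticsearch:
  #hosts: ["localhost:9200"]

output.logstash:
  hosts: ["localhost:5044"]
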
# Logstash side: add a beats input (e.g. logconf/beats.conf)
input {
  beats {
    port => 5044
  }
}

Run

./filebeat -e -c filebeat.yml
echo "nohup ./filebeat -e -c filebeat.yml &" > start.sh
chmod +x start.sh
./start.sh
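
To confirm the Filebeat → Logstash → Elasticsearch pipeline is flowing, watch the index list (with the plain elasticsearch {} output above, beats events still land in logstash-*):

curl 'http://localhost:9200/_cat/indices?v'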

Kibana statistics

Visualize

Create a dashboard

Part 2

Logstash

# copy the request field into a temporary field
filter {
    mutate {
        add_field => {
            "reqs" => "%{request}"
        }
    }
}
# split it into the URI and the query string
filter {
    mutate {
        split => ["reqs", "?"]
        add_field => { "uri" => "%{[reqs][0]}" }
        add_field => { "req_uri" => "%{[reqs][0]}" }
#        add_field => { "querystring" => "%{[reqs][1]}" }
    }
}
# then drop the temporary fields (inside the same filter block)
    mutate {
        remove_field => [
            "reqs",
            "uri"
        ]
    }
filter {
    mutate {
        add_field => {
            "tmp" => "%{request}"
        }
    }
    if [tmp] =~ "\?" {
        mutate {
            split => [
                "tmp", "?"
            ]
            add_field => {
                "params" => "%{[tmp][1]}"
            }
        }
        kv {
            field_split => "&"
            source => "params"
            include_keys => [ "category", "utm_source" ]
            prefix => "param_"
        }
    }
}
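
A quick way to try this filter without touching the live pipeline is Logstash's -e flag, which defaults to a stdin input and rubydebug stdout output; the sample log line and parameter values below are made up for illustration:

echo '127.0.0.1 - - [01/Jul/2019:10:00:00 +0900] "GET /items?category=books&utm_source=mail HTTP/1.1" 200 512 "-" "curl/7.61.1"' | \
bin/logstash -e '
filter {
  grok { match => { "message" => "%{COMBINEDAPACHELOG}" } }
  mutate { add_field => { "tmp" => "%{request}" } }
  if [tmp] =~ "\?" {
    mutate {
      split => [ "tmp", "?" ]
      add_field => { "params" => "%{[tmp][1]}" }
    }
    kv {
      field_split => "&"
      source => "params"
      include_keys => [ "category", "utm_source" ]
      prefix => "param_"
    }
  }
}'
# expected extra fields: param_category => "books", param_utm_source => "mail"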
    # extract selected query parameters (uses the "querystring" field created by the split above)
    if [request] =~ "\?" {
        kv {
            field_split => "&"
            source => "querystring"
            include_keys => [ "query", "redirectUrl" ]
            prefix => "param_"
        }
    }
filter {
    # drop comment lines and requests for static resources
    if [message] =~ "^#|\.(css|js|ico|png|xml|jpg|JPG|gif|jpeg|eot|htc\?) " {
        drop {}
    }
}
# drop AdSense crawler traffic
if [agent] =~ "Mediapartners" {
    drop {}
}
# drop requests that the useragent filter classifies as bots
if [device] == "Spider" {
    drop {}
}
    # parse the User-Agent string
    useragent {
        source => "agent"
    }
    # use the access-log timestamp as the event time
    date {
        match => [ "timestamp", "dd/MMM/yyyy:HH:mm:ss Z" ]
    }
    # decode URL-encoded query parameters
    urldecode {
        field => "params"
    }
    # store the response size as a number
    mutate {
        convert => [ "bytes", "integer" ]
    }
filter {
    grok {
        match => [
            "message", "%{COMBINEDAPACHELOG}",
            "message", "%{COMMONAPACHELOG}"
        ]
    }
}
output {
  elasticsearch {
    hosts => "localhost:9200"
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    document_type => "%{[@metadata][type]}"
  }
}
    # strip escaped quotes and the @fields wrapper from JSON-style log lines
    mutate {
        gsub => [
            'message', '\\x22', '',
            'message', '"@fields": {', '',
            'message', '} }', '}',
            'message', '@', ''
        ]
    }

geo_point

curl -XPUT http://localhost:9200/my_index -H 'Content-Type: application/json' -d '
{
  "mappings" : {
    "logs" : {
      "properties" : {
        "location" : { "type" : "geo_point"}
      }
    }
  }
}'
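
The mapping can be verified before loading any data:

curl 'http://localhost:9200/my_index/_mapping?pretty'
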
filter {
    csv {
        columns => ["lv","region_addr",
        "latitude","longitude","cnt"]
    }
    mutate {
        convert => {"longitude" => "float"}
        convert => {"latitude" => "float"}
        # geo_point as an array takes [lon, lat] order
        add_field => ["location", "%{longitude}"]
        add_field => ["location", "%{latitude}"]
    }
    mutate {
        convert => [ "location", "float" ]
    }
}
lv,region_addr,latitude,longitude,cnt
1,๊ฐ•์›,37.88532579,127.729829,7

Kibana

Elasticsearch

Kibana ์ธ์ฆ with nginx

Install htpasswd

sudo yum install httpd-tools -y
sudo htpasswd -c /etc/nginx/htpasswd.users kibanaadmin   # -c creates the password file
sudo htpasswd /etc/nginx/htpasswd.users kenuheo          # add another user

Add nginx configuration

sudo vi /etc/nginx/nginx.conf
# inside the server block, enable basic auth and proxy to Kibana
        auth_basic "Restricted Access";
        auth_basic_user_file /etc/nginx/htpasswd.users;

        location / {
                sendfile off;
                proxy_pass         http://127.0.0.1:5601;  # Kibana address; adjust if Kibana runs on another host
                proxy_redirect     default;
                proxy_http_version 1.1;
                proxy_set_header   Host              $host;
                proxy_set_header   X-Real-IP         $remote_addr;
                proxy_set_header   X-Forwarded-For   $proxy_add_x_forwarded_for;
                proxy_set_header   X-Forwarded-Proto $scheme;
                proxy_cache_bypass $http_upgrade;
                proxy_max_temp_file_size 0;
        }
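
After editing nginx.conf, check the syntax and restart nginx so the basic-auth proxy takes effect:

sudo nginx -t
sudo service nginx restart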

Kibana with PM2

npm install -g pm2
cd ~/local/kibana
pm2 start bin/cli
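
To keep Kibana running across reboots, register the process list with pm2's startup hook (pm2 startup prints a command to run once with sudo). If pm2 cannot start the entry point directly, running the kibana script through bash is an alternative:

pm2 start bin/kibana --interpreter bash   # alternative, if the command above does not start Kibana
pm2 startup    # prints a command to run with sudo
pm2 save       # remember the current process list
pm2 status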

References
