imbadatcoding

Reputation: 57

Geoip logstash filter

I have a config file that looks like this:

input {
    file {
        path => "/home/kibana/Documents/external_noise.log"
        type => "external_noise"
        start_position => "beginning"
        sincedb_path => "/dev/null"
    }
}
filter {

    grok {
        match => { 'message' => '%{CISCOTIMESTAMP:timestamp} %{WORD:action}%{SPACE}%{DATA:logsource} %{DATA:interface} %{GREEDYDATA:kvpairs}' }
    }

    kv {
        source => "kvpairs"
        field_split => ";"
        value_split => ":"
        remove_field => "kvpairs"
    }

    mutate {
        remove_field => [ "message" ]
    }

    geoip {
        source => "src"
        target => "geoip"
        database => "/etc/logstash/GeoLiteCity.dat"
        add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
        add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
    }

    mutate {
        convert => [ "[geoip][coordinates]", "float" ]
    }

    date {
        match => [ "timestamp", "MMM dd HH:mm:ss" ]
        target => "@timestamp"
    }

    if "_grokparsefailure" in [tags] {
        drop { }
    }
}


output {

    stdout {
        codec => rubydebug
    }

    elasticsearch {
        action => "index"
        host => "localhost"
        index => "external-%{+dd.MM.YYYY}"
        workers => 1
    }
}

My sample log lines are these:

Jan 1 22:54:17 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 70.77.116.190; dst: %DSTIP%; proto: tcp; product: VPN-1 & FireWall-1; service: 445; s_port: 2612;
Jan 1 22:54:22 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 61.164.41.144; dst: %DSTIP%; proto: udp; product: VPN-1 & FireWall-1; service: 5060; s_port: 5069;
Jan 1 22:54:23 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 69.55.245.136; dst: %DSTIP%; proto: tcp; product: VPN-1 & FireWall-1; service: 445; s_port: 2970;
Jan 1 22:54:41 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 95.104.65.30; dst: %DSTIP%; proto: tcp; product: VPN-1 & FireWall-1; service: 445; s_port: 2565;
Jan 1 22:54:43 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 222.186.24.11; dst: %DSTIP%; proto: tcp; product: VPN-1 & FireWall-1; service: 2967; s_port: 6000;
Jan 1 22:54:54 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 74.204.108.202; dst: %DSTIP%; proto: udp; product: VPN-1 & FireWall-1; service: 137; s_port: 53038;
Jan 1 22:55:10 drop   %LOGSOURCE% >eth1 rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 71.111.186.26; dst: %DSTIP%; proto: tcp; product: VPN-1 & FireWall-1; service: 445; s_port: 38548;
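
For reference, this is roughly what I expect the grok pattern to pull out of the first sample line (a sketch based on the pattern, not actual Logstash output):

timestamp => "Jan 1 22:54:17"
action    => "drop"
logsource => "%LOGSOURCE%"
interface => ">eth1"
kvpairs   => "rule: 7; rule_uid: {C1336766-9489-4049-9817-50584D83A245}; src: 70.77.116.190; dst: %DSTIP%; proto: tcp; product: VPN-1 & FireWall-1; service: 445; s_port: 2612;"

The kv filter should then split kvpairs on ";" and ":" into individual fields such as src.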

I tried to visualize the geoip data in Kibana, but it shows that no results were found. Is there something wrong with my geoip configuration? I downloaded the database and had no problem with that, but it seems that geoip cannot read the src field that stores the IP address. Also, when I expanded the table of fields, I did not see any new geoip fields containing information about the src IP address being mapped to a real geographical location.

Any help would be appreciated.

Upvotes: 0

Views: 4604

Answers (1)

Val

Reputation: 217254

The only issue you have is that your kv filter doesn't properly split out the fields because of the surrounding spaces.

Right now, when Logstash parses your logs, you get events like this:

{
      "@version" => "1",
    "@timestamp" => "2015-01-01T22:15:13.000Z",
          "host" => "iMac-de-Consulthys.local",
          "path" => "/home/kibana/Documents/external_noise.log",
          "type" => "external_noise",
     "timestamp" => "Jan 1 23:15:13",
        "action" => "drop",
     "logsource" => "%LOGSOURCE%",
     "interface" => ">eth1",
          " rule" => " 7",
      " rule_uid" => " {C1336766-9489-4049-9817-50584D83A245}",
           " src" => " 218.8.245.123",
           " dst" => " %DSTIP%",
         " proto" => " tcp",
       " product" => " VPN-1&FireWall-1",
       " service" => " 2967",
        " s_port" => " 6000",
}

You can see that all the fields extracted by the kv filter have a leading space, which is why the geoip filter cannot find the src field.
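
You can reproduce this in isolation with a minimal sketch (assuming a local Logstash install; kv-test.conf is a hypothetical file name) that pipes just the key/value portion of a line through the same kv settings:

input { stdin { } }

filter {
    # same separators as in the question; source defaults to the message field
    kv {
        field_split => ";"
        value_split => ":"
    }
}

output { stdout { codec => rubydebug } }

Running echo 'rule: 7; src: 70.77.116.190; dst: %DSTIP%' | bin/logstash -f kv-test.conf shows keys named " src" and " dst" (with a leading space) instead of src and dst.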

So all you have to do is to modify your kv filter to trim your keys and values, like this:

kv {
    source => "kvpairs"
    field_split => ";"
    value_split => ":"
    trim => "\s"                 <--- add this line
    trimkey => "\s"              <--- add this line
    remove_field => "kvpairs"
}
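
Note that trim and trimkey are the option names in the kv filter that ships with Logstash 1.x, which your config appears to use (the host option in the elasticsearch output and the GeoLiteCity.dat database both point that way); in later versions of the kv plugin they were renamed trim_key and trim_value, so adjust accordingly if you're on a newer release.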

Then you'll get nice events with a properly created geoip field like this:

{
      "@version" => "1",
    "@timestamp" => "2015-01-01T22:15:13.000Z",
          "host" => "iMac-de-Consulthys.local",
          "path" => "/home/kibana/Documents/external_noise.log",
          "type" => "external_noise",
     "timestamp" => "Jan 1 23:15:13",
        "action" => "drop",
     "logsource" => "%LOGSOURCE%",
     "interface" => ">eth1",
          "rule" => "7",
      "rule_uid" => "{C1336766-9489-4049-9817-50584D83A245}",
           "src" => "218.8.245.123",
           "dst" => "%DSTIP%",
         "proto" => "tcp",
       "product" => "VPN-1&FireWall-1",
       "service" => "2967",
        "s_port" => "6000",
         "geoip" => {
                      "ip" => "218.8.245.123",
           "country_code2" => "CN",
           "country_code3" => "CHN",
            "country_name" => "China",
          "continent_code" => "AS",
             "region_name" => "08",
               "city_name" => "Harbin",
                "latitude" => 45.75,
               "longitude" => 126.64999999999998,
                "timezone" => "Asia/Harbin",
        "real_region_name" => "Heilongjiang",
                "location" => [
            [0] 126.64999999999998,
            [1] 45.75
        ],
             "coordinates" => [
            [0] 126.64999999999998,
            [1] 45.75
        ]
    }
}
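
Also, since your file input uses start_position => "beginning" with sincedb_path => "/dev/null", simply restarting Logstash will re-read and re-index the whole log file, so you can verify the fix right away.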

Upvotes: 1
