Reputation: 66
On a working Elastic Stack I'm trying to send the MySQL slow log to an OSS Elasticsearch cluster.
Data is saved to ES, including [event][dataset] for example, but everything under the [mysql][slowlog] (or even [mysql]) objects is being discarded.
This is my pipeline config:
input {
  beats {
    port => 5404
    ssl => true
    client_inactivity_timeout => 240
    ssl_certificate_authorities => ["........pem"]
    ssl_certificate => ".........pem"
    ssl_key => "...........pem"
    ssl_verify_mode => "force_peer"
  }
}
filter {
  if [fileset][module] == "mysql" {
    if [fileset][name] == "error" {
      # [mysql][error] part, not that relevant
    }
    else if [fileset][name] == "slowlog" {
      grok {
        match => { "message" => ["^# User@Host: %{USER:[mysql][slowlog][user]}(\[[^\]]+\])? @\s* \[%{IP:[mysql][slowlog][host]}\] \s*Id:\s* %{NUMBER:[mysql][slowlog][id]}\n# Schema: %{GREEDYDATA:[mysql][slowlog][schema]}\s* Last_errno: %{NUMBER:[mysql][slowlog][last_errno]}\s* Killed: %{NUMBER:[mysql][slowlog][killed]}\n# Query_time: %{NUMBER:[mysql][slowlog][query_time][sec]}\s* Lock_time: %{NUMBER:[mysql][slowlog][lock_time][sec]}\s* Rows_sent: %{NUMBER:[mysql][slowlog][rows_sent]}\s* Rows_examined: %{NUMBER:[mysql][slowlog][rows_examined]}\s* Rows_affected: %{NUMBER:[mysql][slowlog][rows_affected]}\n# Bytes_sent: %{NUMBER:[mysql][slowlog][bytes_sent]}\n(SET timestamp=%{NUMBER:[mysql][slowlog][timestamp]};\n)?%{GREEDYMULTILINE:[mysql][slowlog][query]}"] }
        pattern_definitions => {
          "GREEDYMULTILINE" => "(.|\n)*"
        }
        remove_field => "message"
      }
      date {
        match => [ "[mysql][slowlog][timestamp]", "UNIX" ]
      }
      mutate {
        gsub => ["[mysql][slowlog][query]", "\n# Time: [0-9]+ [0-9][0-9]:[0-9][0-9]:[0-9][0-9](\\.[0-9]+)?$", ""]
        add_field => {"locktime" => "%{[mysql][slowlog][lock_time][sec]}"}
      }
    }
  }
}
output {
  elasticsearch {
    user => "logstash_writer"
    password => "******"
    hosts => ["localhost:9200", "localhost:9201"]
    index => "%{[@metadata][beat]}-%{+YYYY.MM}"
  }
}
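For debugging, the filter section can be tested in isolation by feeding the _source JSON of an indexed event through stdin; a minimal sketch of such a harness (hypothetical, not part of my real pipeline):

# hypothetical debug harness: paste one event's _source JSON per line on stdin
input {
  stdin { codec => json }
}
filter {
  if [fileset][module] == "mysql" {
    mutate { add_tag => ["matched_fileset_module"] }
  }
}
output {
  stdout { codec => rubydebug }
}

The rubydebug output then shows exactly which fields the conditional sees when an event arrives.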
Even if I try to save some value to a duration field instead of [mysql][slowlog][query_time][sec], or to copy the value of [mysql][slowlog][lock_time][sec] to a new field locktime, the field doesn't get created in ES. I've really been reading the docs and searching for a similar problem for days, but can't get a clue what's wrong with my pipeline...
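For completeness, the locktime copy can also be written with mutate's copy option instead of the add_field line above; a sketch of that equivalent variant:

mutate {
  copy => { "[mysql][slowlog][lock_time][sec]" => "locktime" }
}

Neither variant produces the field.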
This is the mapping that gets created in ES:
{
  "mapping": {
    "_doc": {
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "@version": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "agent": {
          "properties": {
            "ephemeral_id": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            },
            "hostname": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            },
            "id": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            },
            "type": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            },
            "version": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "ecs": {
          "properties": {
            "version": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "event": {
          "properties": {
            "dataset": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            },
            "module": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "fileset": {
          "properties": {
            "name": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "host": {
          "properties": {
            "name": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "input": {
          "properties": {
            "type": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "log": {
          "properties": {
            "file": {
              "properties": {
                "path": {
                  "type": "text",
                  "fields": {
                    "keyword": {
                      "type": "keyword",
                      "ignore_above": 256
                    }
                  }
                }
              }
            },
            "flags": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            },
            "offset": {
              "type": "long"
            }
          }
        },
        "message": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        },
        "service": {
          "properties": {
            "type": {
              "type": "text",
              "fields": {
                "keyword": {
                  "type": "keyword",
                  "ignore_above": 256
                }
              }
            }
          }
        },
        "tags": {
          "type": "text",
          "fields": {
            "keyword": {
              "type": "keyword",
              "ignore_above": 256
            }
          }
        }
      }
    }
  }
}
And this is an example of an ES entry:
{
  "took" : 346,
  "timed_out" : false,
  "_shards" : {
    "total" : 1,
    "successful" : 1,
    "skipped" : 0,
    "failed" : 0
  },
  "hits" : {
    "total" : {
      "value" : 1220,
      "relation" : "eq"
    },
    "max_score" : 1.0,
    "hits" : [
      {
        "_index" : "filebeat-2020.07",
        "_type" : "_doc",
        "_id" : "SSYId3MBlOlESbQxaGQs",
        "_score" : 1.0,
        "_source" : {
          "host" : {
            "name" : "nos12"
          },
          "input" : {
            "type" : "log"
          },
          "agent" : {
            "version" : "7.6.1",
            "type" : "filebeat",
            "ephemeral_id" : "1decb58b-dc7e-4bf9-8942-df203ef1504f",
            "hostname" : "nos12",
            "id" : "ea3f44c8-faa3-48e5-869c-3a4703c67818"
          },
          "tags" : [
            "beats_input_codec_plain_applied"
          ],
          "event" : {
            "dataset" : "mysql.slowlog",
            "module" : "mysql"
          },
          "@timestamp" : "2020-07-22T14:47:44.559Z",
          "fileset" : {
            "name" : "slowlog"
          },
          "message" : """# User@Host: pmm[pmm] @ [127.0.0.1] Id: 5737606019
# Schema: Last_errno: 0 Killed: 0
# Query_time: 0.000215 Lock_time: 0.000052 Rows_sent: 0 Rows_examined: 0 Rows_affected: 0
# Bytes_sent: 361
SET timestamp=1595429264;
SELECT /* pmm-agent:perfschema */ `performance_schema`.`events_statements_history`.`SQL_TEXT`, `performance_schema`.`events_statements_history`.`DIGEST`, `performance_schema`.`events_statements_history`.`CURRENT_SCHEMA` FROM `performance_schema`.`events_statements_history` WHERE DIGEST IS NOT NULL AND SQL_TEXT IS NOT NULL;""",
          "service" : {
            "type" : "mysql"
          },
          "log" : {
            "flags" : [
              "multiline"
            ],
            "file" : {
              "path" : "/mysql_data/slowquery/slow.log"
            },
            "offset" : 10146495
          },
          "@version" : "1",
          "ecs" : {
            "version" : "1.4.0"
          }
        }
      }
    ]
  }
}
Slowlog example:
# Schema: Last_errno: 0 Killed: 0
# Query_time: 0.000211 Lock_time: 0.000055 Rows_sent: 0 Rows_examined: 0 Rows_affected: 0
# Bytes_sent: 361
SET timestamp=1595435719;
SELECT /* pmm-agent:perfschema */ `performance_schema`.`events_statements_history`.`SQL_TEXT`, `performance_schema`.`events_statements_history`.`DIGEST`, `performance_schema`.`events_statements_history`.`CURRENT_SCHEMA` FROM `performance_schema`.`events_statements_history` WHERE DIGEST IS NOT NULL AND SQL_TEXT IS NOT NULL;
Grok debugger result:
{
  "[mysql][slowlog][rows_affected]": "0",
  "[mysql][slowlog][schema]": " ",
  "[mysql][slowlog][killed]": "0",
  "[mysql][slowlog][query]": "SELECT /* pmm-agent:perfschema */ `performance_schema`.`events_statements_history`.`SQL_TEXT`, `performance_schema`.`events_statements_history`.`DIGEST`, `performance_schema`.`events_statements_history`.`CURRENT_SCHEMA` FROM `performance_schema`.`events_statements_history` WHERE DIGEST IS NOT NULL AND SQL_TEXT IS NOT NULL;",
  "[mysql][slowlog][rows_examined]": "0",
  "[mysql][slowlog][host]": "127.0.0.1",
  "[mysql][slowlog][rows_sent]": "0",
  "[mysql][slowlog][timestamp]": "1595429264",
  "[mysql][slowlog][id]": "5737606019",
  "[mysql][slowlog][last_errno]": "0",
  "[mysql][slowlog][lock_time][sec]": "0.000052",
  "[mysql][slowlog][user]": "pmm",
  "[mysql][slowlog][query_time][sec]": "0.000215",
  "[mysql][slowlog][bytes_sent]": "361"
}
It's interesting that even with an invalid grok pattern I get the same result, and there is no other pipeline catching Filebeat data from MySQL :/
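One way to confirm that suspicion in the live pipeline is to tag events on both sides of the conditional (hypothetical debug addition):

filter {
  if [fileset][module] == "mysql" {
    mutate { add_tag => ["fileset_module_matched"] }
  } else {
    mutate { add_tag => ["fileset_module_missing"] }
  }
}

If every indexed document ends up with fileset_module_missing, the slowlog branch is never entered, which would explain why even an invalid grok pattern changes nothing.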
Upvotes: 0
Views: 224
Reputation: 66
jenni from discuss.elastic.co found the answer. The solution was to change [fileset][module] (which did not exist in the events) to [event][module]:

https://discuss.elastic.co/t/logstash-not-saving-additional-fields/242220
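Applied to the pipeline above, only the outer conditional changes; a sketch of the fix based on that thread:

filter {
  if [event][module] == "mysql" {
    if [fileset][name] == "slowlog" {
      # grok / date / mutate filters exactly as in the question
    }
  }
}

This matches the example document, which carries [event][module] and [fileset][name] but no [fileset][module], so the original outer condition could never be true and the whole filter branch was skipped.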
Upvotes: 1