From 6ce8ab4e7fbeee248ba9875667c71d89a4b37baa Mon Sep 17 00:00:00 2001
From: Jaime Soriano Pastor
Date: Thu, 21 Mar 2019 11:13:13 +0100
Subject: [PATCH 1/4] Don't use multiline options in Logstash module with json format (#11346)

(cherry picked from commit f76f2936bd12c96b4312cdb89a2b2f88d03d0302)
---
 CHANGELOG.next.asciidoc                     | 24 +++++++++++++++++++++
 filebeat/module/logstash/log/config/log.yml |  3 +++
 2 files changed, 27 insertions(+)

diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc
index 53a3c34304a..2cb684db11a 100644
--- a/CHANGELOG.next.asciidoc
+++ b/CHANGELOG.next.asciidoc
@@ -43,6 +43,30 @@ https://github.com/elastic/beats/compare/v6.7.0...6.x[Check the HEAD diff]
 
 *Filebeat*
 
+- Add `convert_timezone` option to Elasticsearch module to convert dates to UTC. {issue}9756[9756] {pull}9761[9761]
+- Support IPv6 addresses with zone id in IIS ingest pipeline.
+  {issue}9836[9836] error log: {pull}9869[9869], access log: {pull}9955[9955].
+- Support haproxy log lines without captured headers. {issue}9463[9463] {pull}9958[9958]
+- Make elasticsearch/audit fileset be more lenient in parsing node name. {issue}10035[10035] {pull}10135[10135]
+- Fix bad bytes count in `docker` input when filtering by stream. {pull}10211[10211]
+- Fixed data types for roles and indices fields in `elasticsearch/audit` fileset {pull}10307[10307]
+- Ensure `source.address` is always populated by the nginx module (ECS). {pull}10418[10418]
+- Add support for Cisco syslog format used by their switch. {pull}10760[10760]
+- Cover empty request data, url and version in Apache2 module{pull}10730[10730]
+- Fix registry entries not being cleaned due to race conditions. {pull}10747[10747]
+- Improve detection of file deletion on Windows. {pull}10747[10747]
+- Fix errors in filebeat Zeek dashboard and README files. Add notice.log support. {pull}10916[10916]
+- Fix a bug when converting NetFlow fields to snake_case. {pull}10950[10950]
+- Add on_failure handler for Zeek ingest pipelines. Fix one field name error for notice and add an additional test case. {issue}11004[11004] {pull}11105[11105]
+- Fix goroutine leak happening when harvesters are dynamically stopped. {pull}11263[11263]
+- Fix issue preventing docker container events to be stored if the container has a network interface without ip address. {issue}11225[11225] {pull}11247[11247]
+- Add on_failure handler for Zeek ingest pipelines. Fix one field name error for notice and add an additional test
+  case. {issue}11004[11004] {pull}11105[11105]
+- Change URLPATH grok pattern to support brackets. {issue}11135[11135] {pull}11252[11252]
+- Add support for iis log with different address format. {issue}11255[11255] {pull}11256[11256]
+- Add fix to parse syslog message with priority value 0. {issue}11010[11010]
+- Don't apply multiline rules in Logstash json logs. {pull}11346[11346]
+
 *Heartbeat*
 
 *Journalbeat*
diff --git a/filebeat/module/logstash/log/config/log.yml b/filebeat/module/logstash/log/config/log.yml
index e1671a4afb0..8ecf4e1db78 100644
--- a/filebeat/module/logstash/log/config/log.yml
+++ b/filebeat/module/logstash/log/config/log.yml
@@ -4,10 +4,13 @@ paths:
  - {{$path}}
 {{ end }}
 exclude_files: [".gz$"]
+
+{{ if eq .format "plain" }}
 multiline:
   pattern: ^\[[0-9]{4}-[0-9]{2}-[0-9]{2}
   negate: true
   match: after
+{{ end }}
 
 {{ if .convert_timezone }}
 processors:

From 80db7a7ece2928d98de03ad60b2b0bef18ec234c Mon Sep 17 00:00:00 2001
From: Shaunak Kashyap
Date: Tue, 2 Apr 2019 06:37:25 -0700
Subject: [PATCH 2/4] Fix CHANGELOG

---
 CHANGELOG.next.asciidoc | 22 ----------------------
 1 file changed, 22 deletions(-)

diff --git a/CHANGELOG.next.asciidoc b/CHANGELOG.next.asciidoc
index 2cb684db11a..03ea1be81ac 100644
--- a/CHANGELOG.next.asciidoc
+++ b/CHANGELOG.next.asciidoc
@@ -43,28 +43,6 @@ https://github.com/elastic/beats/compare/v6.7.0...6.x[Check the HEAD diff]
 
 *Filebeat*
 
-- Add `convert_timezone` option to Elasticsearch module to convert dates to UTC. {issue}9756[9756] {pull}9761[9761]
-- Support IPv6 addresses with zone id in IIS ingest pipeline.
-  {issue}9836[9836] error log: {pull}9869[9869], access log: {pull}9955[9955].
-- Support haproxy log lines without captured headers. {issue}9463[9463] {pull}9958[9958]
-- Make elasticsearch/audit fileset be more lenient in parsing node name. {issue}10035[10035] {pull}10135[10135]
-- Fix bad bytes count in `docker` input when filtering by stream. {pull}10211[10211]
-- Fixed data types for roles and indices fields in `elasticsearch/audit` fileset {pull}10307[10307]
-- Ensure `source.address` is always populated by the nginx module (ECS). {pull}10418[10418]
-- Add support for Cisco syslog format used by their switch. {pull}10760[10760]
-- Cover empty request data, url and version in Apache2 module{pull}10730[10730]
-- Fix registry entries not being cleaned due to race conditions. {pull}10747[10747]
-- Improve detection of file deletion on Windows. {pull}10747[10747]
-- Fix errors in filebeat Zeek dashboard and README files. Add notice.log support. {pull}10916[10916]
-- Fix a bug when converting NetFlow fields to snake_case. {pull}10950[10950]
-- Add on_failure handler for Zeek ingest pipelines. Fix one field name error for notice and add an additional test case. {issue}11004[11004] {pull}11105[11105]
-- Fix goroutine leak happening when harvesters are dynamically stopped. {pull}11263[11263]
-- Fix issue preventing docker container events to be stored if the container has a network interface without ip address. {issue}11225[11225] {pull}11247[11247]
-- Add on_failure handler for Zeek ingest pipelines. Fix one field name error for notice and add an additional test
-  case. {issue}11004[11004] {pull}11105[11105]
-- Change URLPATH grok pattern to support brackets. {issue}11135[11135] {pull}11252[11252]
-- Add support for iis log with different address format. {issue}11255[11255] {pull}11256[11256]
-- Add fix to parse syslog message with priority value 0. {issue}11010[11010]
 - Don't apply multiline rules in Logstash json logs. {pull}11346[11346]
 
 *Heartbeat*

From 5e58ac602eba0060ada9745fe18490abeb5193c1 Mon Sep 17 00:00:00 2001
From: Shaunak Kashyap
Date: Tue, 2 Apr 2019 06:46:14 -0700
Subject: [PATCH 3/4] Adding JSON log file + expected file

---
 .../logstash/log/test/logstash-json.log      |  11 ++
 .../log/test/logstash-json.log-expected.json | 174 ++++++++++++++++++
 2 files changed, 185 insertions(+)
 create mode 100644 filebeat/module/logstash/log/test/logstash-json.log
 create mode 100644 filebeat/module/logstash/log/test/logstash-json.log-expected.json

diff --git a/filebeat/module/logstash/log/test/logstash-json.log b/filebeat/module/logstash/log/test/logstash-json.log
new file mode 100644
index 00000000000..7ca6308f3e7
--- /dev/null
+++ b/filebeat/module/logstash/log/test/logstash-json.log
@@ -0,0 +1,11 @@
+{"level":"INFO","loggerName":"logstash.setting.writabledirectory","timeMillis":1554211185938,"thread":"main","logEvent":{"message":"Creating directory","setting":"path.queue","path":"/Users/shaunak/logstash-6.7.0/data/queue"}}
+{"level":"INFO","loggerName":"logstash.setting.writabledirectory","timeMillis":1554211185989,"thread":"main","logEvent":{"message":"Creating directory","setting":"path.dead_letter_queue","path":"/Users/shaunak/logstash-6.7.0/data/dead_letter_queue"}}
+{"level":"WARN","loggerName":"logstash.config.source.multilocal","timeMillis":1554211186100,"thread":"LogStash::Runner","logEvent":{"message":"Ignoring the 'pipelines.yml' file because modules or command line options are specified"}}
+{"level":"INFO","loggerName":"logstash.runner","timeMillis":1554211186118,"thread":"LogStash::Runner","logEvent":{"message":"Starting Logstash","logstash.version":"6.7.0"}}
+{"level":"INFO","loggerName":"logstash.agent","timeMillis":1554211186157,"thread":"LogStash::Runner","logEvent":{"message":"No persistent UUID file found. Generating new UUID","uuid":"9e1f7d9d-54df-4d5f-a8c6-a749cc6bc65e","path":"/Users/shaunak/logstash-6.7.0/data/uuid"}}
+{"level":"INFO","loggerName":"logstash.pipeline","timeMillis":1554211191608,"thread":"Converge PipelineAction::Create<main>","logEvent":{"message":"Starting pipeline","pipeline_id":"main","pipeline.workers":8,"pipeline.batch.size":125,"pipeline.batch.delay":50}}
+{"level":"INFO","loggerName":"logstash.pipeline","timeMillis":1554211191728,"thread":"Converge PipelineAction::Create<main>","logEvent":{"message":"Pipeline started successfully","pipeline_id":"main","thread":"#"}}
+{"level":"INFO","loggerName":"logstash.agent","timeMillis":1554211191782,"thread":"Ruby-0-Thread-1: /Users/shaunak/logstash-6.7.0/lib/bootstrap/environment.rb:6","logEvent":{"message":"Pipelines running","count":1,"running_pipelines":[{"metaClass":{"metaClass":{"metaClass":{"running_pipelines":"[:main]","non_running_pipelines":[]}}}}]}}
+{"level":"INFO","loggerName":"logstash.agent","timeMillis":1554211192126,"thread":"Api Webserver","logEvent":{"message":"Successfully started Logstash API endpoint","port":9600}}
+{"level":"WARN","loggerName":"logstash.runner","timeMillis":1554211198930,"thread":"SIGINT handler","logEvent":{"message":"SIGINT received. Shutting down."}}
+{"level":"INFO","loggerName":"logstash.pipeline","timeMillis":1554211199144,"thread":"[main]-pipeline-manager","logEvent":{"message":"Pipeline has terminated","pipeline_id":"main","thread":"#"}}
diff --git a/filebeat/module/logstash/log/test/logstash-json.log-expected.json b/filebeat/module/logstash/log/test/logstash-json.log-expected.json
new file mode 100644
index 00000000000..5288775eee7
--- /dev/null
+++ b/filebeat/module/logstash/log/test/logstash-json.log-expected.json
@@ -0,0 +1,174 @@
+[
+    {
+        "@timestamp": "2019-04-02T13:19:45.938Z",
+        "event.dataset": "logstash.log",
+        "fileset.module": "logstash",
+        "fileset.name": "log",
+        "input.type": "log",
+        "logstash.log.level": "INFO",
+        "logstash.log.log_event.path": "/Users/shaunak/logstash-6.7.0/data/queue",
+        "logstash.log.log_event.setting": "path.queue",
+        "logstash.log.message": "Creating directory",
+        "logstash.log.module": "logstash.setting.writabledirectory",
+        "logstash.log.thread": "main",
+        "offset": 0,
+        "prospector.type": "log"
+    },
+    {
+        "@timestamp": "2019-04-02T13:19:45.989Z",
+        "event.dataset": "logstash.log",
+        "fileset.module": "logstash",
+        "fileset.name": "log",
+        "input.type": "log",
+        "logstash.log.level": "INFO",
+        "logstash.log.log_event.path": "/Users/shaunak/logstash-6.7.0/data/dead_letter_queue",
+        "logstash.log.log_event.setting": "path.dead_letter_queue",
+        "logstash.log.message": "Creating directory",
+        "logstash.log.module": "logstash.setting.writabledirectory",
+        "logstash.log.thread": "main",
+        "offset": 227,
+        "prospector.type": "log"
+    },
+    {
+        "@timestamp": "2019-04-02T13:19:46.100Z",
+        "event.dataset": "logstash.log",
+        "fileset.module": "logstash",
+        "fileset.name": "log",
+        "input.type": "log",
+        "logstash.log.level": "WARN",
+        "logstash.log.message": "Ignoring the 'pipelines.yml' file because modules or command line options are specified",
+        "logstash.log.module": "logstash.config.source.multilocal",
+        "logstash.log.thread": "LogStash::Runner",
+        "offset": 478,
+        "prospector.type": "log"
+    },
+    {
+        "@timestamp": "2019-04-02T13:19:46.118Z",
+        "event.dataset": "logstash.log",
+        "fileset.module": "logstash",
+        "fileset.name": "log",
+        "input.type": "log",
+        "logstash.log.level": "INFO",
+        "logstash.log.log_event.logstash.version": "6.7.0",
+        "logstash.log.message": "Starting Logstash",
+        "logstash.log.module": "logstash.runner",
+        "logstash.log.thread": "LogStash::Runner",
+        "offset": 712,
+        "prospector.type": "log"
+    },
+    {
+        "@timestamp": "2019-04-02T13:19:46.157Z",
+        "event.dataset": "logstash.log",
+        "fileset.module": "logstash",
+        "fileset.name": "log",
+        "input.type": "log",
+        "logstash.log.level": "INFO",
+        "logstash.log.log_event.path": "/Users/shaunak/logstash-6.7.0/data/uuid",
+        "logstash.log.log_event.uuid": "9e1f7d9d-54df-4d5f-a8c6-a749cc6bc65e",
"9e1f7d9d-54df-4d5f-a8c6-a749cc6bc65e", + "logstash.log.message": "No persistent UUID file found. Generating new UUID", + "logstash.log.module": "logstash.agent", + "logstash.log.thread": "LogStash::Runner", + "offset": 885, + "prospector.type": "log" + }, + { + "@timestamp": "2019-04-02T13:19:51.608Z", + "event.dataset": "logstash.log", + "fileset.module": "logstash", + "fileset.name": "log", + "input.type": "log", + "logstash.log.level": "INFO", + "logstash.log.log_event.pipeline.batch.delay": 50, + "logstash.log.log_event.pipeline.batch.size": 125, + "logstash.log.log_event.pipeline.workers": 8, + "logstash.log.log_event.pipeline_id": "main", + "logstash.log.message": "Starting pipeline", + "logstash.log.module": "logstash.pipeline", + "logstash.log.thread": "Converge PipelineAction::Create
", + "offset": 1158, + "prospector.type": "log" + }, + { + "@timestamp": "2019-04-02T13:19:51.728Z", + "event.dataset": "logstash.log", + "fileset.module": "logstash", + "fileset.name": "log", + "input.type": "log", + "logstash.log.level": "INFO", + "logstash.log.log_event.pipeline_id": "main", + "logstash.log.log_event.thread": "#", + "logstash.log.message": "Pipeline started successfully", + "logstash.log.module": "logstash.pipeline", + "logstash.log.thread": "Converge PipelineAction::Create
", + "offset": 1421, + "prospector.type": "log" + }, + { + "@timestamp": "2019-04-02T13:19:51.782Z", + "event.dataset": "logstash.log", + "fileset.module": "logstash", + "fileset.name": "log", + "input.type": "log", + "logstash.log.level": "INFO", + "logstash.log.log_event.count": 1, + "logstash.log.log_event.running_pipelines": [ + { + "metaClass": { + "metaClass": { + "metaClass": { + "non_running_pipelines": [], + "running_pipelines": "[:main]" + } + } + } + } + ], + "logstash.log.message": "Pipelines running", + "logstash.log.module": "logstash.agent", + "logstash.log.thread": "Ruby-0-Thread-1: /Users/shaunak/logstash-6.7.0/lib/bootstrap/environment.rb:6", + "offset": 1659, + "prospector.type": "log" + }, + { + "@timestamp": "2019-04-02T13:19:52.126Z", + "event.dataset": "logstash.log", + "fileset.module": "logstash", + "fileset.name": "log", + "input.type": "log", + "logstash.log.level": "INFO", + "logstash.log.log_event.port": 9600, + "logstash.log.message": "Successfully started Logstash API endpoint", + "logstash.log.module": "logstash.agent", + "logstash.log.thread": "Api Webserver", + "offset": 1998, + "prospector.type": "log" + }, + { + "@timestamp": "2019-04-02T13:19:58.930Z", + "event.dataset": "logstash.log", + "fileset.module": "logstash", + "fileset.name": "log", + "input.type": "log", + "logstash.log.level": "WARN", + "logstash.log.message": "SIGINT received. Shutting down.", + "logstash.log.module": "logstash.runner", + "logstash.log.thread": "SIGINT handler", + "offset": 2177, + "prospector.type": "log" + }, + { + "@timestamp": "2019-04-02T13:19:59.144Z", + "event.dataset": "logstash.log", + "fileset.module": "logstash", + "fileset.name": "log", + "input.type": "log", + "logstash.log.level": "INFO", + "logstash.log.log_event.pipeline_id": "main", + "logstash.log.log_event.thread": "#", + "logstash.log.message": "Pipeline has terminated", + "logstash.log.module": "logstash.pipeline", + "logstash.log.thread": "[main]-pipeline-manager", + "offset": 2335, + "prospector.type": "log" + } +] \ No newline at end of file From 9fa20e6577a3aa7eb28df49eba08625b52c6a3c6 Mon Sep 17 00:00:00 2001 From: Shaunak Kashyap Date: Tue, 2 Apr 2019 08:14:46 -0700 Subject: [PATCH 4/4] Making JSON log files work in module system test --- filebeat/tests/system/test_modules.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/filebeat/tests/system/test_modules.py b/filebeat/tests/system/test_modules.py index fa0c4b25c03..a04443cc510 100644 --- a/filebeat/tests/system/test_modules.py +++ b/filebeat/tests/system/test_modules.py @@ -129,6 +129,11 @@ def run_on_file(self, module, fileset, test_file, cfgfile): "-M", "*.*.input.close_eof=true", ] + # Based on the convention that if a name contains -json the json format is needed. Currently used for LS. + if "-json" in test_file: + cmd.append("-M") + cmd.append("{module}.{fileset}.var.format=json".format(module=module, fileset=fileset)) + output_path = os.path.join(self.working_dir) output = open(os.path.join(output_path, "output.log"), "ab") output.write(" ".join(cmd) + "\n")