From 7306cf0717af6deecd8477c7f0faf5f51db5a285 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:48:52 -0500 Subject: [PATCH 01/18] Add shared mage targets Convert all projects to using Mage https://magefile.org for building. Generate the travis config based on what the root magefile specifies. Remove some unused scripts and files. Update jenkins scripts to use mage for testing. Add a mage docs target that generates docs and opens the browser when PREVIEW is set. --- .gitignore | 10 +- .travis.yml | 326 ++++++++--- Jenkinsfile | 2 +- Makefile | 223 +++----- NOTICE.txt | 2 +- dev-tools/.gitignore | 1 - dev-tools/Makefile | 4 - dev-tools/README.md | 29 - dev-tools/ci/templates/travis.yml.tmpl | 146 +++++ .../module_include_list.go | 10 +- dev-tools/deploy | 25 - dev-tools/jenkins_ci.ps1 | 10 +- dev-tools/jenkins_ci.sh | 5 +- dev-tools/jenkins_release.sh | 2 +- dev-tools/mage/.gitignore | 2 +- dev-tools/mage/clean.go | 12 +- dev-tools/mage/common.go | 2 +- dev-tools/mage/dashboard.go | 37 +- dev-tools/mage/docs.go | 131 ++++- dev-tools/mage/fields.go | 49 +- dev-tools/mage/fmt.go | 4 +- dev-tools/mage/godaemon.go | 6 +- dev-tools/mage/integtest.go | 32 +- dev-tools/mage/kibana.go | 16 +- dev-tools/mage/modules.go | 87 ++- dev-tools/mage/pkg.go | 2 +- dev-tools/mage/pkgspecs.go | 14 +- dev-tools/mage/settings.go | 17 +- dev-tools/mage/target/build/build.go | 48 ++ dev-tools/mage/target/common/check.go | 41 ++ dev-tools/mage/target/common/clean.go | 25 + dev-tools/mage/target/common/fmt.go | 29 + dev-tools/mage/target/common/shared.go | 25 + .../mage/target/dashboards/dashboards.go | 64 +++ dev-tools/mage/target/docs/docs.go | 40 ++ dev-tools/mage/target/integtest/integtest.go | 82 +++ dev-tools/mage/target/pkg/test.go | 26 + dev-tools/mage/target/test/test.go | 35 ++ dev-tools/mage/target/unittest/unittest.go | 65 +++ dev-tools/make/gox.mk | 30 ++ dev-tools/make/mage.mk | 10 + dev-tools/make/mage_wrapper.mk | 50 ++ dev-tools/make/misspell.mk | 27 + dev-tools/make/oss.mk | 12 + dev-tools/make/python.mk | 17 + dev-tools/make/reviewdog.mk | 32 ++ dev-tools/make/xpack.mk | 48 +- .../windows/install-service.ps1.tmpl | 2 +- docs/magefile.go | 37 ++ libbeat/scripts/README.md | 2 - libbeat/scripts/create_packer.py | 69 --- libbeat/scripts/wait_for.sh | 22 - magefile.go | 507 ++++++++++++++++-- reviewdog.yml | 2 +- script/build_docs.sh | 35 -- script/config_collector.py | 95 ---- script/generate_imports.py | 58 -- script/modules_collector.py | 61 --- setup.yml | 1 - testing/environments/test.env | 5 + 60 files changed, 2039 insertions(+), 769 deletions(-) delete mode 100644 dev-tools/.gitignore delete mode 100644 dev-tools/Makefile create mode 100644 dev-tools/ci/templates/travis.yml.tmpl delete mode 100755 dev-tools/deploy create mode 100644 dev-tools/mage/target/build/build.go create mode 100644 dev-tools/mage/target/common/check.go create mode 100644 dev-tools/mage/target/common/clean.go create mode 100644 dev-tools/mage/target/common/fmt.go create mode 100644 dev-tools/mage/target/common/shared.go create mode 100644 dev-tools/mage/target/dashboards/dashboards.go create mode 100644 dev-tools/mage/target/docs/docs.go create mode 100644 dev-tools/mage/target/integtest/integtest.go create mode 100644 dev-tools/mage/target/pkg/test.go create mode 100644 dev-tools/mage/target/test/test.go create mode 100644 dev-tools/mage/target/unittest/unittest.go create mode 100644 dev-tools/make/gox.mk create mode 100644 dev-tools/make/mage_wrapper.mk create mode 100644 dev-tools/make/misspell.mk 
create mode 100644 dev-tools/make/oss.mk create mode 100644 dev-tools/make/python.mk create mode 100644 dev-tools/make/reviewdog.mk create mode 100644 docs/magefile.go delete mode 100644 libbeat/scripts/create_packer.py delete mode 100644 libbeat/scripts/wait_for.sh delete mode 100755 script/build_docs.sh delete mode 100644 script/config_collector.py delete mode 100644 script/generate_imports.py delete mode 100644 script/modules_collector.py delete mode 100644 setup.yml create mode 100644 testing/environments/test.env diff --git a/.gitignore b/.gitignore index 5781a1e43a3..f990c862f53 100644 --- a/.gitignore +++ b/.gitignore @@ -3,12 +3,13 @@ /.idea /.vscode /build -/*/data -/*/logs -/*/fields.yml /*/*.template*.json **/html_docs -/*/_meta/kibana.generated +*beat/fields.yml +*beat/_meta/kibana.generated +*beat/build +*beat/logs +*beat/data # Files .DS_Store @@ -19,6 +20,7 @@ coverage.out .python-version beat.db *.keystore +mage_output_file.go # Editor swap files *.swp diff --git a/.travis.yml b/.travis.yml index 7ea410b0bfe..67eb49216e4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,206 +1,396 @@ +# DO NOT EDIT - AUTO-GENERATED +# This file was generated by 'mage update:travisCI' from a template at +# dev-tools/ci/templates/travis.yml.tmpl. + sudo: required dist: trusty services: - docker +python: 2.7.14 + language: go -# Make sure project can also be built on travis for clones of the repo go_import_path: github.com/elastic/beats +stages: + - name: check + - name: test + - name: crosscompile + if: type != pull_request + env: global: # Cross-compile for amd64 only to speed up testing. - GOX_FLAGS="-arch amd64" + + # Build snapshots when testing packaging. + - SNAPSHOT=true + + # Dependency version. - DOCKER_COMPOSE_VERSION=1.21.0 - GO_VERSION="$(cat .go-version)" - # Newer versions of minikube fail on travis, see: https://github.com/kubernetes/minikube/issues/2704 + # Newer versions of minikube fail on travis. See https://github.com/kubernetes/minikube/issues/2704. - TRAVIS_MINIKUBE_VERSION=v0.25.2 + # Set Python version returned by pyenv. 
+ - PYENV_VERSION=2.7.14 + jobs: include: - # General checks - os: linux - env: TARGETS="check" go: $GO_VERSION stage: check + env: + - BUILD_CMD="mage" + - TARGETS="check" - # Filebeat - os: linux - env: TARGETS="-C filebeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d libbeat" + - TARGETS="unitTest integTest" + - os: osx - env: TARGETS="TEST_ENVIRONMENT=0 -C filebeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d libbeat" + - TARGETS="unitTest" + - os: linux - env: TARGETS="-C x-pack/filebeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d auditbeat" + - TARGETS="unitTest integTest" + + - os: osx + go: $GO_VERSION + stage: test + env: + - BUILD_CMD="mage -d auditbeat" + - TARGETS="unitTest" - # Heartbeat - os: linux - env: TARGETS="-C heartbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d filebeat" + - TARGETS="unitTest integTest" + - os: osx - env: TARGETS="TEST_ENVIRONMENT=0 -C heartbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d filebeat" + - TARGETS="unitTest" - # Auditbeat - os: linux - env: TARGETS="-C auditbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d heartbeat" + - TARGETS="unitTest integTest" + - os: osx - env: TARGETS="TEST_ENVIRONMENT=0 -C auditbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d heartbeat" + - TARGETS="unitTest" + - os: linux - env: TARGETS="-C auditbeat crosscompile" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d journalbeat" + - TARGETS="integTest" + - os: linux - env: TARGETS="-C x-pack/auditbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d metricbeat" + - TARGETS="unitTest integTest" - # Libbeat - - os: linux - env: TARGETS="-C libbeat testsuite" + - os: osx go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d metricbeat" + - TARGETS="unitTest" + - os: linux - env: TARGETS="-C libbeat crosscompile" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d packetbeat" + - TARGETS="unitTest" + + - os: osx + go: $GO_VERSION + stage: test + env: + - BUILD_CMD="mage -d packetbeat" + - TARGETS="unitTest" + - os: linux - env: STRESS_TEST_OPTIONS="-timeout=20m -race -v -parallel 1" TARGETS="-C libbeat stress-tests" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d winlogbeat" + - TARGETS="unitTest" + - os: linux - env: TARGETS="-C x-pack/libbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d x-pack/libbeat" + - TARGETS="unitTest" - # Metricbeat - os: linux - env: TARGETS="-C metricbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d x-pack/auditbeat" + - TARGETS="unitTest integTest" + - os: osx - env: TARGETS="TEST_ENVIRONMENT=0 -C metricbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d x-pack/auditbeat" + - TARGETS="unitTest" + - os: linux - env: TARGETS="-C metricbeat crosscompile" go: $GO_VERSION stage: test - - os: linux - env: TARGETS="-C x-pack/metricbeat testsuite" + env: + - BUILD_CMD="mage -d x-pack/filebeat" + - TARGETS="unitTest integTest" + + - os: osx go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d x-pack/filebeat" + - TARGETS="unitTest" - # Packetbeat - os: linux - env: TARGETS="-C packetbeat testsuite" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage -d x-pack/functionbeat" + - TARGETS="unitTest integTest" - # Winlogbeat - os: linux - env: TARGETS="-C winlogbeat crosscompile" go: $GO_VERSION stage: test + env: + - BUILD_CMD="mage" + - TARGETS="docs" - # 
Functionbeat - os: linux - env: TARGETS="-C x-pack/functionbeat testsuite" go: $GO_VERSION - stage: test - - os: osx - env: TARGETS="TEST_ENVIRONMENT=0 -C x-pack/functionbeat testsuite" + stage: crosscompile + env: + - BUILD_CMD="make -C libbeat" + - TARGETS="gox" + + - os: linux go: $GO_VERSION - stage: test + stage: crosscompile + env: + - BUILD_CMD="make -C auditbeat" + - TARGETS="gox" - # Journalbeat - os: linux - env: TARGETS="-C journalbeat testsuite" go: $GO_VERSION - stage: test + stage: crosscompile + env: + - BUILD_CMD="make -C filebeat" + - TARGETS="gox" - # Generators - os: linux - env: TARGETS="-C generator/metricbeat test" go: $GO_VERSION - stage: test + stage: crosscompile + env: + - BUILD_CMD="make -C heartbeat" + - TARGETS="gox" + - os: linux - env: TARGETS="-C generator/beat test" go: $GO_VERSION - stage: test + stage: crosscompile + env: + - BUILD_CMD="make -C journalbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C metricbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C packetbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C winlogbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/libbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/auditbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/filebeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/functionbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/heartbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/journalbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/metricbeat" + - TARGETS="gox" + + - os: linux + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/packetbeat" + - TARGETS="gox" - # Docs - os: linux - env: TARGETS="docs" + go: $GO_VERSION + stage: crosscompile + env: + - BUILD_CMD="make -C x-pack/winlogbeat" + - TARGETS="gox" + + # Generators + - os: linux + env: + - TARGETS="-C generator/metricbeat test" + go: $GO_VERSION + stage: test + - os: linux + env: + - TARGETS="-C generator/beat test" go: $GO_VERSION stage: test # Kubernetes - os: linux + go: $GO_VERSION + stage: test install: deploy/kubernetes/.travis/setup.sh env: - - TARGETS="-C deploy/kubernetes test" - TRAVIS_K8S_VERSION=v1.8.0 - stage: test + - BUILD_CMD="mage -d deploy/kubernetes" + - TARGETS="integTest" - os: linux + go: $GO_VERSION + stage: test install: deploy/kubernetes/.travis/setup.sh env: - - TARGETS="-C deploy/kubernetes test" - TRAVIS_K8S_VERSION=v1.9.4 - stage: test + - BUILD_CMD="mage -d deploy/kubernetes" + - TARGETS="integTest" - os: linux + go: $GO_VERSION + stage: test install: deploy/kubernetes/.travis/setup.sh env: - - TARGETS="-C deploy/kubernetes test" - TRAVIS_K8S_VERSION=v1.10.0 - stage: test - # TODO include 1.11 once minikube supports it - #- os: linux - # install: deploy/kubernetes/.travis/setup.sh - # env: - # - TARGETS="-C deploy/kubernetes test" - # - TRAVIS_K8S_VERSION=v1.11.0 - # stage: test + - BUILD_CMD="mage -d deploy/kubernetes" + - TARGETS="integTest" 
addons: apt: update: true packages: - python-virtualenv - - libpcap-dev - xsltproc - libxml2-utils + # For Packetbeat and Filebeat netflow. + - libpcap-dev + # For building journalbeat. - libsystemd-journal-dev before_install: - - python --version + # Use conservative file modes when creating files. - umask 022 - chmod -R go-w $GOPATH/src/github.com/elastic/beats - # Docker-compose installation + + # Docker-compose installation. - sudo rm /usr/local/bin/docker-compose || true - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose - chmod +x docker-compose - sudo mv docker-compose /usr/local/bin - - if [ $TRAVIS_OS_NAME = osx ]; then pip install virtualenv; fi + # Python setup. + - python --version + ## Tell virtualenv where to find Python. + - > + if [ "$TRAVIS_OS_NAME" != "osx" ]; then + pyenv versions; + export VIRTUALENV_PYTHON=$(pyenv prefix $PYENV_VERSION)/bin/python; + fi + - if [ "$TRAVIS_OS_NAME" == "osx" ]; then pip install virtualenv; fi -# Skips installations step -install: true + # Mage tool setup. + - make mage script: - - make $TARGETS + - ${BUILD_CMD:-make} $TARGETS + +after_failure: + - /home/travis/gopath/src/github.com/elastic/beats/build/ve/linux/bin/python --version || true + - cat /home/travis/gopath/src/github.com/elastic/beats/journalbeat/build/system-tests/run/test*/journalbeat.log || true notifications: slack: diff --git a/Jenkinsfile b/Jenkinsfile index 7b1a7bd647c..d79622bfb71 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -47,7 +47,7 @@ pipeline { } /** Updating generated files for Beat. - Checks the GO environment. + Checks the Go environment. Checks the Python environment. Checks YAML files are generated. Validate that all updates were committed. diff --git a/Makefile b/Makefile index 0f3553737d2..d1f7b0294f9 100644 --- a/Makefile +++ b/Makefile @@ -1,83 +1,31 @@ -BUILD_DIR=$(CURDIR)/build -COVERAGE_DIR=$(BUILD_DIR)/coverage -BEATS?=auditbeat filebeat heartbeat journalbeat metricbeat packetbeat winlogbeat x-pack/functionbeat -PROJECTS=libbeat $(BEATS) -PROJECTS_ENV=libbeat filebeat metricbeat -PYTHON_ENV?=$(BUILD_DIR)/python-env -VIRTUALENV_PARAMS?= -FIND=find . -type f -not -path "*/vendor/*" -not -path "*/build/*" -not -path "*/.git/*" -GOLINT=golint -GOLINT_REPO=github.com/golang/lint/golint -REVIEWDOG=reviewdog -REVIEWDOG_OPTIONS?=-diff "git diff master" -REVIEWDOG_REPO=github.com/haya14busa/reviewdog/cmd/reviewdog -XPACK_SUFFIX=x-pack/ - -# PROJECTS_XPACK_PKG is a list of Beats that have independent packaging support -# in the x-pack directory (rather than having the OSS build produce both sets -# of artifacts). This will be removed once we complete the transition. -PROJECTS_XPACK_PKG=x-pack/auditbeat x-pack/filebeat -# PROJECTS_XPACK_MAGE is a list of Beats whose primary build logic is based in -# Mage. For compatibility with CI testing these projects support a subset of the -# makefile targets. After all Beats converge to primarily using Mage we can -# remove this and treat all sub-projects the same. -PROJECTS_XPACK_MAGE=x-pack/metricbeat $(PROJECTS_XPACK_PKG) +# +# Variables +# +.DEFAULT_GOAL := help # # Includes # include dev-tools/make/mage.mk +include dev-tools/make/misspell.mk +include dev-tools/make/reviewdog.mk -# Runs complete testsuites (unit, system, integration) for all beats with coverage and race detection. 
-# Also it builds the docs and the generators - -.PHONY: testsuite -testsuite: - @$(foreach var,$(PROJECTS) $(PROJECTS_XPACK_MAGE),$(MAKE) -C $(var) testsuite || exit 1;) - -.PHONY: setup-commit-hook -setup-commit-hook: - @cp script/pre_commit.sh .git/hooks/pre-commit - @chmod 751 .git/hooks/pre-commit +# +# Targets (sorted alphabetically) +# -stop-environments: - @$(foreach var,$(PROJECTS_ENV),$(MAKE) -C $(var) stop-environment || exit 0;) - -# Runs unit and system tests without coverage and race detection. -.PHONY: test -test: - @$(foreach var,$(PROJECTS),$(MAKE) -C $(var) test || exit 1;) - -# Runs unit tests without coverage and race detection. -.PHONY: unit -unit: - @$(foreach var,$(PROJECTS),$(MAKE) -C $(var) unit || exit 1;) - -# Crosscompile all beats. -.PHONY: crosscompile -crosscompile: - @$(foreach var,filebeat winlogbeat metricbeat heartbeat auditbeat,$(MAKE) -C $(var) crosscompile || exit 1;) - -.PHONY: coverage-report -coverage-report: - @mkdir -p $(COVERAGE_DIR) - @echo 'mode: atomic' > ./$(COVERAGE_DIR)/full.cov - @# Collects all coverage files and skips top line with mode - @$(foreach var,$(PROJECTS),tail -q -n +2 ./$(var)/$(COVERAGE_DIR)/*.cov >> ./$(COVERAGE_DIR)/full.cov || true;) - @go tool cover -html=./$(COVERAGE_DIR)/full.cov -o $(COVERAGE_DIR)/full.html - @echo "Generated coverage report $(COVERAGE_DIR)/full.html" +# Collects dashboards from all Beats and generates a zip file distribution. +.PHONY: beats-dashboards +beats-dashboards: mage + mage package:dashboards -.PHONY: update -update: notice - @$(foreach var,$(PROJECTS) $(PROJECTS_XPACK_MAGE),$(MAKE) -C $(var) update || exit 1;) - @$(MAKE) -C deploy/kubernetes all +.PHONY: check +check: mage + mage check .PHONY: clean -clean: - @rm -rf build - @$(foreach var,$(PROJECTS) $(PROJECTS_XPACK_MAGE),$(MAKE) -C $(var) clean || exit 1;) - @$(MAKE) -C generator clean - @-mage -clean 2> /dev/null +clean: mage + mage clean # Cleans up the vendor directory from unnecessary files # This should always be run after updating the dependencies @@ -85,90 +33,41 @@ clean: clean-vendor: @sh script/clean_vendor.sh -.PHONY: check -check: python-env - @$(foreach var,$(PROJECTS) dev-tools $(PROJECTS_XPACK_MAGE),$(MAKE) -C $(var) check || exit 1;) - @# Checks also python files which are not part of the beats - @$(FIND) -name *.py -exec $(PYTHON_ENV)/bin/autopep8 -d --max-line-length 120 {} \; | (! grep . 
-q) || (echo "Code differs from autopep8's style" && false) - @# Validate that all updates were committed - @$(MAKE) update - @$(MAKE) check-headers - @git diff | cat - @git update-index --refresh - @git diff-index --exit-code HEAD -- - -.PHONY: check-headers -check-headers: - @go get -u github.com/elastic/go-licenser - @go-licenser -d -exclude x-pack - @go-licenser -d -license Elastic x-pack - -.PHONY: add-headers -add-headers: - @go get github.com/elastic/go-licenser - @go-licenser -exclude x-pack - @go-licenser -license Elastic x-pack - -# Corrects spelling errors -.PHONY: misspell -misspell: - go get -u github.com/client9/misspell/cmd/misspell - # Ignore Kibana files (.json) - $(FIND) \ - -not -path "*.json" \ - -not -path "*.log" \ - -name '*' \ - -exec misspell -w {} \; +.PHONY: docs +docs: mage + mage docs .PHONY: fmt -fmt: add-headers python-env - @$(foreach var,$(PROJECTS) dev-tools $(PROJECTS_XPACK_MAGE),$(MAKE) -C $(var) fmt || exit 1;) - @# Cleans also python files which are not part of the beats - @$(FIND) -name "*.py" -exec $(PYTHON_ENV)/bin/autopep8 --in-place --max-line-length 120 {} \; - -.PHONY: lint -lint: - @go get $(GOLINT_REPO) $(REVIEWDOG_REPO) - $(REVIEWDOG) $(REVIEWDOG_OPTIONS) - -# Builds the documents for each beat -.PHONY: docs -docs: - @$(foreach var,$(PROJECTS),BUILD_DIR=${BUILD_DIR} $(MAKE) -C $(var) docs || exit 1;) - sh ./script/build_docs.sh dev-guide github.com/elastic/beats/docs/devguide ${BUILD_DIR} +fmt: mage + mage fmt + +# Default target. +.PHONY: help +help: + @echo Use mage rather than make. Here are the available mage targets: + @mage -l + +# Check Makefile format. +.PHONY: makelint +makelint: SHELL:=/bin/bash +makelint: + @diff <(grep ^.PHONY Makefile | sort) <(grep ^.PHONY Makefile) \ + || echo Makefile targets need to be sorted. .PHONY: notice -notice: python-env - @echo "Generating NOTICE" - @$(PYTHON_ENV)/bin/python dev-tools/generate_notice.py . - -# Sets up the virtual python environment -.PHONY: python-env -python-env: - @test -d $(PYTHON_ENV) || virtualenv $(VIRTUALENV_PARAMS) $(PYTHON_ENV) - @$(PYTHON_ENV)/bin/pip install -q --upgrade pip autopep8==1.3.5 six - @# Work around pip bug. See: https://github.com/pypa/pip/issues/4464 - @find $(PYTHON_ENV) -type d -name dist-packages -exec sh -c "echo dist-packages > {}.pth" ';' - -# Tests if apm works with the current code -.PHONY: test-apm -test-apm: - sh ./script/test_apm.sh - -### Packaging targets #### - -# Builds a snapshot release. -.PHONY: snapshot -snapshot: - @$(MAKE) SNAPSHOT=true release +notice: mage + mage update:notice # Builds a release. .PHONY: release -release: beats-dashboards - @$(foreach var,$(BEATS) $(PROJECTS_XPACK_PKG),$(MAKE) -C $(var) release || exit 1;) - @$(foreach var,$(BEATS) $(PROJECTS_XPACK_PKG), \ - test -d $(var)/build/distributions && test -n "$$(ls $(var)/build/distributions)" || exit 0; \ - mkdir -p build/distributions/$(subst $(XPACK_SUFFIX),'',$(var)) && mv -f $(var)/build/distributions/* build/distributions/$(subst $(XPACK_SUFFIX),'',$(var))/ || exit 1;) +release: mage + mage package + +# Builds a snapshot release. The Go version defined in .go-version will be +# installed and used for the build. +.PHONY: release-manager-release +release-manager-release: + ./dev-tools/run_with_go_ver $(MAKE) release # Builds a snapshot release. The Go version defined in .go-version will be # installed and used for the build. @@ -176,13 +75,25 @@ release: beats-dashboards release-manager-snapshot: @$(MAKE) SNAPSHOT=true release-manager-release -# Builds a snapshot release. 
The Go version defined in .go-version will be -# installed and used for the build. -.PHONY: release-manager-release -release-manager-release: - ./dev-tools/run_with_go_ver $(MAKE) release +.PHONY: setup-commit-hook +setup-commit-hook: + @cp script/pre_commit.sh .git/hooks/pre-commit + @chmod 751 .git/hooks/pre-commit -# Collects dashboards from all Beats and generates a zip file distribution. -.PHONY: beats-dashboards -beats-dashboards: mage update - @mage packageBeatDashboards +# Builds a snapshot release. +.PHONY: snapshot +snapshot: + @$(MAKE) SNAPSHOT=true release + +# Tests if apm works with the current code +.PHONY: test-apm +test-apm: + sh ./script/test_apm.sh + +.PHONY: testsuite +testsuite: mage + mage test:all + +.PHONY: update +update: mage + mage update diff --git a/NOTICE.txt b/NOTICE.txt index 05720d3c795..edfb7897d78 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -1,5 +1,5 @@ Elastic Beats -Copyright 2014-2018 Elasticsearch BV +Copyright 2014-2019 Elasticsearch BV This product includes software developed by The Apache Software Foundation (http://www.apache.org/). diff --git a/dev-tools/.gitignore b/dev-tools/.gitignore deleted file mode 100644 index 378eac25d31..00000000000 --- a/dev-tools/.gitignore +++ /dev/null @@ -1 +0,0 @@ -build diff --git a/dev-tools/Makefile b/dev-tools/Makefile deleted file mode 100644 index 32eeda92d0b..00000000000 --- a/dev-tools/Makefile +++ /dev/null @@ -1,4 +0,0 @@ -ES_BEATS?=.. - -# Path to the libbeat Makefile -include ${ES_BEATS}/libbeat/scripts/Makefile diff --git a/dev-tools/README.md b/dev-tools/README.md index cc6c91e49f2..c7f03612d51 100644 --- a/dev-tools/README.md +++ b/dev-tools/README.md @@ -8,7 +8,6 @@ The following scripts are used by the unified release process: |----------------------|-------------| | get_version | Returns the current version | | set_version | Sets the current version in all places where change is required. Doesn't commit changes. | -| deploy | Builds all artifacts for the officially supported Beats | @@ -19,31 +18,3 @@ Other scripts: |----------------------|-------------| | aggregate_coverage.py | Used to create coverage reports that contain both unit and system tests data | | merge_pr | Used to make it easier to open a PR that merges one branch into another. | - - -Import / export the dashboards of a single Beat: - -| File | Description | -|-----------------------|-------------| -| export_dashboards.py | Python script to export the Beat dashboards from Elasticsearch to a local directory| - -Running export_dashboards.py in environment ----------------------------------------------- - -If you are running the python script for the first time, you need to create the -environment by running the following commands in the `beats/dev-tools` -directory: - -``` -virtualenv env -. env/bin/activate -pip install -r requirements.txt -``` - -This creates the environment that contains all the python packages required to -run the `export_dashboards.py` script. Thus, for the next runs you just need -to enable the environment: - -``` -. env/bin/activate -``` diff --git a/dev-tools/ci/templates/travis.yml.tmpl b/dev-tools/ci/templates/travis.yml.tmpl new file mode 100644 index 00000000000..eb7447f3e06 --- /dev/null +++ b/dev-tools/ci/templates/travis.yml.tmpl @@ -0,0 +1,146 @@ +# DO NOT EDIT - AUTO-GENERATED +# This file was generated by 'mage update:travisCI' from a template at +# dev-tools/ci/templates/travis.yml.tmpl. 
+ +sudo: required +dist: trusty +services: + - docker + +python: 2.7.14 + +language: go + +go_import_path: github.com/elastic/beats + +stages: + - name: check + - name: test + - name: crosscompile + if: type != pull_request + +env: + global: + # Cross-compile for amd64 only to speed up testing. + - GOX_FLAGS="-arch amd64" + + # Build snapshots when testing packaging. + - SNAPSHOT=true + + # Dependency version. + - DOCKER_COMPOSE_VERSION=1.21.0 + - GO_VERSION="$(cat .go-version)" + # Newer versions of minikube fail on travis. See https://github.com/kubernetes/minikube/issues/2704. + - TRAVIS_MINIKUBE_VERSION=v0.25.2 + + # Set Python version returned by pyenv. + - PYENV_VERSION=2.7.14 + +jobs: + include: +{{- range $job := .Jobs}} + - os: {{$job.OS}} + go: $GO_VERSION + stage: {{$job.Stage}} + env: +{{- range $envVar := $job.Env}} + - {{$envVar}} +{{- end}} +{{end}} + # Generators + - os: linux + env: + - TARGETS="-C generator/metricbeat test" + go: $GO_VERSION + stage: test + - os: linux + env: + - TARGETS="-C generator/beat test" + go: $GO_VERSION + stage: test + + # Kubernetes + - os: linux + go: $GO_VERSION + stage: test + install: deploy/kubernetes/.travis/setup.sh + env: + - TRAVIS_K8S_VERSION=v1.8.0 + - BUILD_CMD="mage -d deploy/kubernetes" + - TARGETS="integTest" + - os: linux + go: $GO_VERSION + stage: test + install: deploy/kubernetes/.travis/setup.sh + env: + - TRAVIS_K8S_VERSION=v1.9.4 + - BUILD_CMD="mage -d deploy/kubernetes" + - TARGETS="integTest" + - os: linux + go: $GO_VERSION + stage: test + install: deploy/kubernetes/.travis/setup.sh + env: + - TRAVIS_K8S_VERSION=v1.10.0 + - BUILD_CMD="mage -d deploy/kubernetes" + - TARGETS="integTest" + +addons: + apt: + update: true + packages: + - python-virtualenv + - xsltproc + - libxml2-utils + # For Packetbeat and Filebeat netflow. + - libpcap-dev + # For building journalbeat. + - libsystemd-journal-dev + +before_install: + # Use conservative file modes when creating files. + - umask 022 + - chmod -R go-w $GOPATH/src/github.com/elastic/beats + + # Docker-compose installation. + - sudo rm /usr/local/bin/docker-compose || true + - curl -L https://github.com/docker/compose/releases/download/${DOCKER_COMPOSE_VERSION}/docker-compose-`uname -s`-`uname -m` > docker-compose + - chmod +x docker-compose + - sudo mv docker-compose /usr/local/bin + + # Python setup. + - python --version + ## Tell virtualenv where to find Python. + - > + if [ "$TRAVIS_OS_NAME" != "osx" ]; then + pyenv versions; + export VIRTUALENV_PYTHON=$(pyenv prefix $PYENV_VERSION)/bin/python; + fi + - if [ "$TRAVIS_OS_NAME" == "osx" ]; then pip install virtualenv; fi + + # Mage tool setup. 
+ - make mage + +script: + - ${BUILD_CMD:-make} $TARGETS + +after_failure: + - /home/travis/gopath/src/github.com/elastic/beats/build/ve/linux/bin/python --version || true + - cat /home/travis/gopath/src/github.com/elastic/beats/journalbeat/build/system-tests/run/test*/journalbeat.log || true + +notifications: + slack: + on_success: change + on_failure: always + on_pull_requests: false + rooms: + secure: "e25J5puEA31dOooTI4T+K+zrTs8XeWIGq2cgmiPt9u/g7eqWeQj1UJnVsr8GOu1RPDyuJZJHXqfrvuOYJTdHzXbwjD0JTbwwVVZMkkZW2SWZHG46HCXPiucjWXEr3hXJKBJDDpIx6VxrN7r17dejv1biQ8QuEFZfiB1H8kbH/ho=" + +after_success: + # Copy full.cov to coverage.txt because codecov.io requires this file + - test -f auditbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f auditbeat/build/coverage/full.cov + - test -f filebeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f filebeat/build/coverage/full.cov + - test -f heartbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f heartbeat/build/coverage/full.cov + - test -f libbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f libbeat/build/coverage/full.cov + - test -f metricbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f metricbeat/build/coverage/full.cov + - test -f packetbeat/build/coverage/full.cov && bash <(curl -s https://codecov.io/bash) -f packetbeat/build/coverage/full.cov diff --git a/dev-tools/cmd/module_include_list/module_include_list.go b/dev-tools/cmd/module_include_list/module_include_list.go index e5fc651d745..ad4b5745cf3 100644 --- a/dev-tools/cmd/module_include_list/module_include_list.go +++ b/dev-tools/cmd/module_include_list/module_include_list.go @@ -102,7 +102,7 @@ func main() { // Skip dirs that have no .go files. goFiles, err := filepath.Glob(filepath.Join(dir, "*.go")) if err != nil { - log.Fatal("Failed checking for .go files in package dir: %v", err) + log.Fatalf("Failed checking for .go files in package dir: %v", err) } if len(goFiles) == 0 { continue @@ -144,17 +144,17 @@ func main() { Imports: imports, }) if err != nil { - log.Fatal("Failed executing template: %v", err) + log.Fatalf("Failed executing template: %v", err) } // Create the output directory. if err = os.MkdirAll(filepath.Dir(outFile), 0755); err != nil { - log.Fatal("Failed to create output directory: %v", err) + log.Fatalf("Failed to create output directory: %v", err) } // Write the output file. 
if err = ioutil.WriteFile(outFile, buf.Bytes(), 0644); err != nil { - log.Fatal("Failed writing output file: %v", err) + log.Fatalf("Failed writing output file: %v", err) } } @@ -239,7 +239,7 @@ func hasInitMethod(file string) bool { } } if err := scanner.Err(); err != nil { - log.Fatal("failed scanning %v: %v", file, err) + log.Fatalf("failed scanning %v: %v", file, err) } return false } diff --git a/dev-tools/deploy b/dev-tools/deploy deleted file mode 100755 index 2b0de52e4c0..00000000000 --- a/dev-tools/deploy +++ /dev/null @@ -1,25 +0,0 @@ -#!/usr/bin/env python -import os -import argparse -from subprocess import check_call - -def main(): - parser = argparse.ArgumentParser( - description="Builds all the Beats artifacts") - parser.add_argument("--no-snapshot", action="store_true", - help="Don't append -SNAPSHOT to the version.") - args = parser.parse_args() - - dir = os.path.dirname(__file__) - os.chdir(dir + "/../") - print("Getting dependencies") - check_call("make clean", shell=True) - print("Done building Docker images.") - if args.no_snapshot: - check_call("make release", shell=True) - else: - check_call("make snapshot", shell=True) - print("All done") - -if __name__ == "__main__": - main() diff --git a/dev-tools/jenkins_ci.ps1 b/dev-tools/jenkins_ci.ps1 index e5fc5a3c1a8..f768d93e8b7 100755 --- a/dev-tools/jenkins_ci.ps1 +++ b/dev-tools/jenkins_ci.ps1 @@ -55,12 +55,4 @@ echo "Building $env:beat" exec { mage build } "Build FAILURE" echo "Unit testing $env:beat" -exec { mage goTestUnit } "mage goTestUnit FAILURE" - -echo "System testing $env:beat" -# Get a CSV list of package names. -$packages = $(go list ./... | select-string -Pattern "/vendor/" -NotMatch | select-string -Pattern "/scripts/cmd/" -NotMatch) -$packages = ($packages|group|Select -ExpandProperty Name) -join "," -exec { go test -race -c -cover -covermode=atomic -coverpkg $packages } "go test -race -cover FAILURE" -Set-Location -Path tests/system -exec { nosetests --with-timer --with-xunit --xunit-file=../../build/TEST-system.xml } "System test FAILURE" +exec { mage unitTest } "mage unitTest FAILURE" diff --git a/dev-tools/jenkins_ci.sh b/dev-tools/jenkins_ci.sh index 9ad590dee19..437118a666b 100755 --- a/dev-tools/jenkins_ci.sh +++ b/dev-tools/jenkins_ci.sh @@ -21,8 +21,6 @@ cleanup() { rm -rf $TEMP_PYTHON_ENV if docker info > /dev/null ; then - make stop-environment || true - make fix-permissions || true echo "Killing all running containers..." ids=$(docker ps -q) if [ -n "$ids" ]; then @@ -38,4 +36,5 @@ trap cleanup EXIT rm -rf ${GOPATH}/pkg cd ${beat} -RACE_DETECTOR=1 make clean check testsuite +make mage +RACE_DETECTOR=1 mage clean check build test diff --git a/dev-tools/jenkins_release.sh b/dev-tools/jenkins_release.sh index cd22480c7e3..02950637610 100755 --- a/dev-tools/jenkins_release.sh +++ b/dev-tools/jenkins_release.sh @@ -45,7 +45,7 @@ cleanup() { trap cleanup EXIT # This controls the defaults used the Jenkins package job. They can be -# overridden by setting them in the environement prior to running this script. +# overridden by setting them in the environment prior to running this script. 
export SNAPSHOT="${SNAPSHOT:-true}" export PLATFORMS="${PLATFORMS:-+linux/armv7 +linux/ppc64le +linux/s390x +linux/mips64}" diff --git a/dev-tools/mage/.gitignore b/dev-tools/mage/.gitignore index 378eac25d31..796b96d1c40 100644 --- a/dev-tools/mage/.gitignore +++ b/dev-tools/mage/.gitignore @@ -1 +1 @@ -build +/build diff --git a/dev-tools/mage/clean.go b/dev-tools/mage/clean.go index 912d372e1df..e13bdc795f4 100644 --- a/dev-tools/mage/clean.go +++ b/dev-tools/mage/clean.go @@ -37,17 +37,23 @@ var DefaultCleanPaths = []string{ "_meta/kibana/6/index-pattern/{{.BeatName}}.json", } -// Clean clean generated build artifacts. +// Clean clean generated build artifacts. Go globs are supported. func Clean(pathLists ...[]string) error { if len(pathLists) == 0 { pathLists = [][]string{DefaultCleanPaths} } for _, paths := range pathLists { for _, f := range paths { - f = MustExpand(f) - if err := sh.Rm(f); err != nil { + files, err := FindFiles(MustExpand(f)) + if err != nil { return err } + + for _, f := range files { + if err := sh.Rm(f); err != nil { + return err + } + } } } return nil diff --git a/dev-tools/mage/common.go b/dev-tools/mage/common.go index 58255f685fc..577f712e725 100644 --- a/dev-tools/mage/common.go +++ b/dev-tools/mage/common.go @@ -548,7 +548,7 @@ func FindFiles(globs ...string) ([]string, error) { func FindFilesRecursive(match func(path string, info os.FileInfo) bool) ([]string, error) { var matches []string err := filepath.Walk(".", func(path string, info os.FileInfo, err error) error { - if err != nil { + if err != nil && !os.IsNotExist(err) { return err } diff --git a/dev-tools/mage/dashboard.go b/dev-tools/mage/dashboard.go index 4e4279738e2..69b10d04761 100644 --- a/dev-tools/mage/dashboard.go +++ b/dev-tools/mage/dashboard.go @@ -19,8 +19,11 @@ package mage import ( "fmt" + "os" "path/filepath" + "strconv" + "github.com/magefile/mage/mg" "github.com/magefile/mage/sh" ) @@ -33,7 +36,7 @@ func ExportDashboard() error { id := EnvOr("ID", "") if id == "" { - return fmt.Errorf("Dashboad ID must be specified") + return fmt.Errorf("Dashboard ID must be specified") } beatsDir, err := ElasticBeatsDir() @@ -51,3 +54,35 @@ func ExportDashboard() error { return dashboardCmd() } + +// ImportDashboards imports dashboards to Kibana using the Beat setup command. +// +// Depends on: build, dashboard +// +// Optional environment variables: +// - KIBANA_URL: URL of Kibana +// - KIBANA_ALWAYS: Connect to Kibana without checking ES version. Default true. +// - ES_URL: URL of Elasticsearch (only used with KIBANA_ALWAYS=false). +func ImportDashboards(buildDep, dashboardDep interface{}) error { + mg.Deps(buildDep, dashboardDep) + + setupDashboards := sh.RunCmd(CWD(BeatName+binaryExtension(GOOS)), + "setup", "--dashboards", + "-E", "setup.dashboards.directory="+kibanaBuildDir) + + kibanaAlways := true + if b, err := strconv.ParseBool(os.Getenv("KIBANA_ALWAYS")); err == nil { + kibanaAlways = b + } + + var args []string + if kibanaURL := EnvOr("KIBANA_URL", ""); kibanaURL != "" { + args = append(args, "-E", "setup.kibana.host="+kibanaURL) + } + if esURL := EnvOr("ES_URL", ""); !kibanaAlways && esURL != "" { + args = append(args, "-E", "setup.elasticsearch.host="+esURL) + } + args = append(args, "-E", "setup.dashboards.always_kibana="+strconv.FormatBool(kibanaAlways)) + + return setupDashboards(args...) 
+} diff --git a/dev-tools/mage/docs.go b/dev-tools/mage/docs.go index 51bd96c2f22..b3186ab2be9 100644 --- a/dev-tools/mage/docs.go +++ b/dev-tools/mage/docs.go @@ -18,18 +18,57 @@ package mage import ( + "context" + "fmt" "log" + "net" + "net/http" + "os" + "os/exec" + "os/signal" + "path/filepath" + "runtime" + "strconv" + "syscall" "github.com/magefile/mage/sh" + "github.com/pkg/errors" +) + +const ( + elasticDocsRepoURL = "https://github.com/elastic/docs.git" ) type docsBuilder struct{} +type asciidocParams struct { + name string + indexFile string +} + +// DocsOption is a documentation generation option for controlling how the docs +// are built. +type DocsOption func(params *asciidocParams) + +// DocsName specifies the documentation's name (default to BeatName). +func DocsName(name string) DocsOption { + return func(params *asciidocParams) { + params.name = name + } +} + +// DocsIndexFile specifies the index file (defaults to docs/index.asciidoc). +func DocsIndexFile(file string) DocsOption { + return func(params *asciidocParams) { + params.indexFile = file + } +} + // Docs holds the utilities for building documentation. var Docs = docsBuilder{} // FieldDocs generates docs/fields.asciidoc from the specified fields.yml file. -func (b docsBuilder) FieldDocs(fieldsYML string) error { +func (docsBuilder) FieldDocs(fieldsYML string) error { // Run the docs_collector.py script. ve, err := PythonVirtualenv() if err != nil { @@ -46,6 +85,7 @@ func (b docsBuilder) FieldDocs(fieldsYML string) error { return err } + // TODO: Port this script to Go. log.Println(">> Generating docs/fields.asciidoc for", BeatName) return sh.Run(python, LibbeatDir("scripts/generate_fields_docs.py"), fieldsYML, // Path to fields.yml. @@ -53,3 +93,92 @@ func (b docsBuilder) FieldDocs(fieldsYML string) error { esBeats, // Path to general beats folder. "--output_path", OSSBeatDir()) // It writes to {output_path}/docs/fields.asciidoc. } + +func (b docsBuilder) AsciidocBook(opts ...DocsOption) error { + params := asciidocParams{ + name: BeatName, + indexFile: CWD("docs/index.asciidoc"), + } + for _, opt := range opts { + opt(¶ms) + } + + repo, err := GetProjectRepoInfo() + if err != nil { + return err + } + + cloneDir := CreateDir(filepath.Join(repo.RootDir, "build/elastic_docs_repo")) + + // Clone if elastic_docs_repo does not exist. + if _, err := os.Stat(cloneDir); err != nil { + log.Println("Cloning elastic/docs to", cloneDir) + if err = sh.Run("git", "clone", "--depth=1", elasticDocsRepoURL, cloneDir); err != nil { + return err + } + } else { + log.Println("Using existing elastic/docs at", cloneDir) + } + + // Render HTML. + htmlDir := CWD("build/html_docs", params.name) + args := []string{ + filepath.Join(cloneDir, "build_docs.pl"), + "--chunk=1", + "--doc", params.indexFile, + "--out", htmlDir, + } + fmt.Println(">> Building HTML docs at", filepath.Join(htmlDir, "index.html")) + if err := sh.Run("perl", args...); err != nil { + return err + } + + // Serve docs with and HTTP server and open the browser. + if preview, _ := strconv.ParseBool(os.Getenv("PREVIEW")); preview { + srv := b.servePreview(htmlDir) + url := "http://" + srv.Addr + fmt.Println("Serving docs preview at", url) + b.openBrowser(url) + + // Wait + fmt.Println("Ctrl+C to stop") + sigs := make(chan os.Signal, 1) + signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) + <-sigs + srv.Shutdown(context.Background()) + } + return nil +} + +// open opens the specified URL in the default browser. 
+func (docsBuilder) openBrowser(url string) error { + var cmd string + var args []string + + switch runtime.GOOS { + case "darwin": + cmd = "open" + case "windows": + cmd = "cmd" + args = []string{"/c", "start"} + default: + cmd = "xdg-open" + } + args = append(args, url) + return exec.Command(cmd, args...).Start() +} + +func (docsBuilder) servePreview(dir string) *http.Server { + server := &http.Server{ + Addr: net.JoinHostPort("localhost", EnvOr("PREVIEW_PORT", "8000")), + Handler: http.FileServer(http.Dir(dir)), + } + + go func() { + if err := server.ListenAndServe(); err != nil { + panic(errors.Wrap(err, "failed to start docs preview")) + } + }() + + return server +} diff --git a/dev-tools/mage/fields.go b/dev-tools/mage/fields.go index 60716592a95..bc4f6cee68f 100644 --- a/dev-tools/mage/fields.go +++ b/dev-tools/mage/fields.go @@ -25,6 +25,35 @@ import ( "github.com/magefile/mage/sh" ) +const ( + // FieldsYML specifies the path to the file containing the field data for + // the Beat (formerly this was ./fields.yml). + FieldsYML = "build/fields/fields.yml" + // FieldsYMLRoot specifies the filename of the project's root level + // fields.yml file (this is being replaced by FieldsYML). + FieldsYMLRoot = "fields.yml" + // FieldsAllYML specifies the path to the file containing the field data for + // the Beat from all license types. It's generally used for making documentation. + FieldsAllYML = "build/fields/fields.all.yml" +) + +// FieldsBuilder is the interface projects to implement for building field data. +type FieldsBuilder interface { + // Generate all fields.go files. + FieldsGo() error + + // Generate build/fields/fields.yml containing fields for the Beat. This + // file may need be copied to fields.yml if tests depend on it, but those + // tests should be updated. + FieldsYML() error + + // Generate build/fields/fields.all.yml containing all possible fields + // for all license types. (Used for field documentation.) + FieldsAllYML() error + + All() // Build everything. +} + // GenerateFieldsYAML generates a fields.yml file for a Beat. This will include // the common fields specified by libbeat, the common fields for the Beat, // and any additional fields.yml files you specify. @@ -54,7 +83,7 @@ func generateFieldsYAML(baseDir, output string, moduleDirs ...string) error { filepath.Join(beatsDir, globalFieldsCmdPath), "-es_beats_path", beatsDir, "-beat_path", baseDir, - "-out", output, + "-out", CreateDir(output), ) return globalFieldsCmd(moduleDirs...) @@ -78,7 +107,7 @@ func GenerateFieldsGo(fieldsYML, out string) error { filepath.Join(beatsDir, assetCmdPath), "-pkg", "include", "-in", fieldsYML, - "-out", createDir(out), + "-out", CreateDir(out), "-license", toLibbeatLicenseName(BeatLicense), BeatName, ) @@ -97,11 +126,15 @@ func GenerateModuleFieldsGo(moduleDir string) error { return err } + if !filepath.IsAbs(moduleDir) { + moduleDir = CWD(moduleDir) + } + moduleFieldsCmd := sh.RunCmd("go", "run", filepath.Join(beatsDir, moduleFieldsCmdPath), "-beat", BeatName, "-license", toLibbeatLicenseName(BeatLicense), - filepath.Join(CWD(), moduleDir), + moduleDir, ) return moduleFieldsCmd() @@ -110,9 +143,7 @@ func GenerateModuleFieldsGo(moduleDir string) error { // GenerateModuleIncludeListGo generates an include/list.go file containing // a import statement for each module and dataset. 
func GenerateModuleIncludeListGo() error { - return GenerateIncludeListGo(nil, []string{ - filepath.Join(CWD(), "module"), - }) + return GenerateIncludeListGo(nil, []string{"module"}) } // GenerateIncludeListGo generates an include/list.go file containing imports @@ -133,9 +164,15 @@ func GenerateIncludeListGo(importDirs []string, moduleDirs []string) error { var args []string for _, dir := range importDirs { + if !filepath.IsAbs(dir) { + dir = CWD(dir) + } args = append(args, "-import", dir) } for _, dir := range moduleDirs { + if !filepath.IsAbs(dir) { + dir = CWD(dir) + } args = append(args, "-moduleDir", dir) } diff --git a/dev-tools/mage/fmt.go b/dev-tools/mage/fmt.go index 0a2c04a4249..7f6a839d621 100644 --- a/dev-tools/mage/fmt.go +++ b/dev-tools/mage/fmt.go @@ -45,7 +45,9 @@ var ( func Format() { // Don't run AddLicenseHeaders and GoImports concurrently because they // both can modify the same files. - mg.Deps(AddLicenseHeaders) + if BeatProjectType != CommunityProject { + mg.Deps(AddLicenseHeaders) + } mg.Deps(GoImports, PythonAutopep8) } diff --git a/dev-tools/mage/godaemon.go b/dev-tools/mage/godaemon.go index c856fca6448..e248b24e47f 100644 --- a/dev-tools/mage/godaemon.go +++ b/dev-tools/mage/godaemon.go @@ -21,6 +21,7 @@ import ( "errors" "log" "os" + "path/filepath" ) var ( @@ -42,7 +43,8 @@ func BuildGoDaemon() error { } // Test if binaries are up-to-date. - output := MustExpand("build/golang-crossbuild/god-{{.Platform.GOOS}}-{{.Platform.Arch}}") + crossbuildDir := "build/golang-crossbuild" + output := filepath.Join(crossbuildDir, MustExpand("god-{{.Platform.GOOS}}-{{.Platform.Arch}}")) input := MustExpand("{{ elastic_beats_dir }}/dev-tools/vendor/github.com/tsg/go-daemon/god.c") if IsUpToDate(output, input) { log.Println(">>> buildGoDaemon is up-to-date for", Platform.Name) @@ -68,7 +70,7 @@ func BuildGoDaemon() error { compileCmd = append(compileCmd, "-m32") } - defer DockerChown(output) + defer DockerChown(crossbuildDir) return RunCmds(compileCmd) } diff --git a/dev-tools/mage/integtest.go b/dev-tools/mage/integtest.go index ddc59fb8f69..b6ea92766f4 100644 --- a/dev-tools/mage/integtest.go +++ b/dev-tools/mage/integtest.go @@ -183,6 +183,27 @@ func runInIntegTestEnv(mageTarget string, test func() error, passThroughEnvVars } magePath := filepath.Join("/go/src", repo.ImportPath, "build/mage-linux-amd64") + // Pass through all variables beginning with BEAT_. + envVars := map[string]string{} + for _, env := range os.Environ() { + parts := strings.SplitN(env, "=", 2) + if len(parts) != 2 { + continue + } + key, value := parts[0], parts[1] + if strings.HasPrefix(key, "BEAT_") { + envVars[key] = value + } + } + for _, varName := range passThroughEnvVars { + if value, isSet := os.LookupEnv(varName); isSet { + envVars[varName] = value + } + } + if mg.Verbose() { + envVars[mg.VerboseEnv] = "1" + } + // Build docker-compose args. 
args := []string{"-p", dockerComposeProjectName(), "run", "-e", "DOCKER_COMPOSE_PROJECT_NAME=" + dockerComposeProjectName(), @@ -194,11 +215,9 @@ func runInIntegTestEnv(mageTarget string, test func() error, passThroughEnvVars if err != nil { return err } - for _, envVar := range passThroughEnvVars { - args = append(args, "-e", envVar+"="+os.Getenv(envVar)) - } - if mg.Verbose() { - args = append(args, "-e", "MAGEFILE_VERBOSE=1") + + for key, value := range envVars { + args = append(args, "-e", key+"="+value) } args = append(args, "-e", beatsDockerIntegrationTestEnvVar+"=true", @@ -278,6 +297,9 @@ func integTestDockerComposeEnvVars() (map[string]string, error) { } return map[string]string{ + // When using mage -d or make -C the PWD env var is not changed so we + // must manually set it to reflect the CWD. + "PWD": CWD(), "ES_BEATS": esBeatsDir, "STACK_ENVIRONMENT": StackEnvironment, // Deprecated use STACK_ENVIRONMENT instead (it's more descriptive). diff --git a/dev-tools/mage/kibana.go b/dev-tools/mage/kibana.go index aeae72d83b8..6c59aeb03b3 100644 --- a/dev-tools/mage/kibana.go +++ b/dev-tools/mage/kibana.go @@ -25,21 +25,16 @@ import ( "github.com/pkg/errors" ) +const kibanaBuildDir = "build/kibana" + // KibanaDashboards collects the Kibana dashboards files and generates the // index patterns based on the fields.yml file. It outputs to build/kibana. // Use PackageKibanaDashboardsFromBuildDir() with this. func KibanaDashboards(moduleDirs ...string) error { - var kibanaBuildDir = "build/kibana" - if err := os.MkdirAll(kibanaBuildDir, 0755); err != nil { return err } - // Create symlink from old directory so `make beats-dashboards` works. - if err := os.Symlink(filepath.Join("..", kibanaBuildDir), "_meta/kibana.generated"); err != nil && !os.IsExist(err) && !os.IsNotExist(err) { - return err - } - // Copy the OSS Beat's common dashboards if they exist. This assumes that // X-Pack Beats only add dashboards with modules (this will require a // change if we have X-Pack only Beats). @@ -81,13 +76,18 @@ func KibanaDashboards(moduleDirs ...string) error { return err } + // Sanity check that fields.yml exists. + if _, err := os.Stat(FieldsYML); err != nil { + return errors.Wrapf(err, "failed checking if %v exists", FieldsYML) + } + // Generate Kibana index pattern files from fields.yml. indexPatternCmd := sh.RunCmd("go", "run", filepath.Join(esBeatsDir, "dev-tools/cmd/kibana_index_pattern/kibana_index_pattern.go"), "-beat", BeatName, "-version", beatVersion, "-index", BeatIndexPrefix+"-*", - "-fields", "fields.yml", + "-fields", FieldsYML, "-out", kibanaBuildDir, ) diff --git a/dev-tools/mage/modules.go b/dev-tools/mage/modules.go index 80fc4c2f7c5..988b22bd589 100644 --- a/dev-tools/mage/modules.go +++ b/dev-tools/mage/modules.go @@ -22,8 +22,55 @@ import ( "os" "path/filepath" "strings" + + "github.com/pkg/errors" ) +type moduleOptions struct { + Enable map[string]struct{} + ExtraVars map[string]interface{} + InputGlobs []string + OutputDir string +} + +// ModuleOption is an option for control build behavior w.r.t. modules. +type ModuleOption func(params *moduleOptions) + +// EnableModule enables the module with the given name (if found). +func EnableModule(name string) ModuleOption { + return func(params *moduleOptions) { + if params.Enable == nil { + params.Enable = map[string]struct{}{} + } + params.Enable[name] = struct{}{} + } +} + +// SetTemplateVariable sets a key/value pair that will be available with +// rendering a config template. 
+func SetTemplateVariable(key string, value interface{}) ModuleOption { + return func(params *moduleOptions) { + if params.ExtraVars == nil { + params.ExtraVars = map[string]interface{}{} + } + params.ExtraVars[key] = value + } +} + +// OutputDir specifies the directory where the output will be written. +func OutputDir(outputDir string) ModuleOption { + return func(params *moduleOptions) { + params.OutputDir = outputDir + } +} + +// InputGlobs is a list of globs to use when looking for files. +func InputGlobs(inputGlobs ...string) ModuleOption { + return func(params *moduleOptions) { + params.InputGlobs = inputGlobs + } +} + var modulesDConfigTemplate = ` # Module: {{.Module}} # Docs: https://www.elastic.co/guide/en/beats/{{.BeatName}}/{{ beat_doc_branch }}/{{.BeatName}}-module-{{.Module}}.html @@ -33,12 +80,20 @@ var modulesDConfigTemplate = ` // GenerateDirModulesD generates a modules.d directory containing the // .yml.disabled files. It adds a header to each file containing a // link to the documentation. -func GenerateDirModulesD() error { - if err := os.RemoveAll("modules.d"); err != nil { +func GenerateDirModulesD(opts ...ModuleOption) error { + args := moduleOptions{ + OutputDir: "modules.d", + InputGlobs: []string{"module/*/_meta/config.yml"}, + } + for _, f := range opts { + f(&args) + } + + if err := os.RemoveAll(args.OutputDir); err != nil { return err } - shortConfigs, err := filepath.Glob("module/*/_meta/config.yml") + shortConfigs, err := FindFiles(args.InputGlobs...) if err != nil { return err } @@ -55,16 +110,36 @@ func GenerateDirModulesD() error { return err } + params := map[string]interface{}{ + "GOOS": EnvOr("DEV_OS", "linux"), + "GOARCH": EnvOr("DEV_ARCH", "amd64"), + "Reference": false, + "Docker": false, + } + for k, v := range args.ExtraVars { + params[k] = v + } + expandedConfig, err := Expand(string(config), params) + if err != nil { + return errors.Wrapf(err, "failed expanding config file=%v", f) + } + data, err := Expand(modulesDConfigTemplate, map[string]interface{}{ "Module": moduleName, - "Config": string(config), + "Config": string(expandedConfig), }) if err != nil { return err } - target := filepath.Join("modules.d", moduleName+".yml.disabled") - err = ioutil.WriteFile(createDir(target), []byte(data), 0644) + target := filepath.Join(args.OutputDir, moduleName) + if _, enabled := args.Enable[moduleName]; enabled { + target += ".yml" + } else { + target += ".yml.disabled" + } + + err = ioutil.WriteFile(CreateDir(target), []byte(data), 0644) if err != nil { return err } diff --git a/dev-tools/mage/pkg.go b/dev-tools/mage/pkg.go index 6802be3c2ed..19e9f1bb2b3 100644 --- a/dev-tools/mage/pkg.go +++ b/dev-tools/mage/pkg.go @@ -156,7 +156,7 @@ func TestPackages(options ...TestPackagesOption) error { args = append(args, "--modules.d") } - args = append(args, "-files", MustExpand("{{.PWD}}/build/distributions/*")) + args = append(args, "-files", CWD("build/distributions/*")) if out, err := goTest(args...); err != nil { if !mg.Verbose() { diff --git a/dev-tools/mage/pkgspecs.go b/dev-tools/mage/pkgspecs.go index ba8d403056b..7eb04761076 100644 --- a/dev-tools/mage/pkgspecs.go +++ b/dev-tools/mage/pkgspecs.go @@ -76,14 +76,18 @@ func UseElasticBeatWithoutXPackPackaging() { // MustUsePackaging will load a named spec from a named file, if any errors // occurs when loading the specs it will panic. // -// NOTE: we assume that specFile is relative to the beatsDir. +// NOTE: we assume that specFile is relative to the beatsDir if not absolute. 
func MustUsePackaging(specName, specFile string) { - beatsDir, err := ElasticBeatsDir() - if err != nil { - panic(err) + if !filepath.IsAbs(specFile) { + beatsDir, err := ElasticBeatsDir() + if err != nil { + panic(err) + } + + specFile = filepath.Join(beatsDir, specFile) } - err = LoadNamedSpec(specName, filepath.Join(beatsDir, specFile)) + err := LoadNamedSpec(specName, specFile) if err != nil { panic(err) } diff --git a/dev-tools/mage/settings.go b/dev-tools/mage/settings.go index c5c46667783..7940a0b65f2 100644 --- a/dev-tools/mage/settings.go +++ b/dev-tools/mage/settings.go @@ -65,6 +65,8 @@ var ( BeatLicense = EnvOr("BEAT_LICENSE", "ASL 2.0") BeatURL = EnvOr("BEAT_URL", "https://www.elastic.co/products/beats/"+BeatName) + BeatProjectType ProjectType + Snapshot bool versionQualified bool @@ -101,12 +103,25 @@ func init() { Snapshot, err = strconv.ParseBool(EnvOr("SNAPSHOT", "false")) if err != nil { - panic(errors.Errorf("failed to parse SNAPSHOT env value", err)) + panic(errors.Wrap(err, "failed to parse SNAPSHOT env value")) } versionQualifier, versionQualified = os.LookupEnv("VERSION_QUALIFIER") } +// ProjectType specifies the type of project (OSS vs X-Pack). +type ProjectType uint8 + +// Project types. +const ( + OSSProject ProjectType = iota + XPackProject + CommunityProject +) + +// ErrUnknownProjectType is returned if an unknown ProjectType value is used. +var ErrUnknownProjectType = fmt.Errorf("unknown ProjectType") + // EnvMap returns map containing the common settings variables and all variables // from the environment. args are appended to the output prior to adding the // environment variables (so env vars have the highest precedence). diff --git a/dev-tools/mage/target/build/build.go b/dev-tools/mage/target/build/build.go new file mode 100644 index 00000000000..3e4d1f41ca7 --- /dev/null +++ b/dev-tools/mage/target/build/build.go @@ -0,0 +1,48 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package build + +import ( + "github.com/elastic/beats/dev-tools/mage" +) + +// Build builds the Beat binary. +func Build() error { + return mage.Build(mage.DefaultBuildArgs()) +} + +// GolangCrossBuild build the Beat binary inside of the golang-builder. +// Do not use directly, use crossBuild instead. +func GolangCrossBuild() error { + return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) +} + +// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). +func BuildGoDaemon() error { + return mage.BuildGoDaemon() +} + +// CrossBuild cross-builds the beat for all target platforms. +func CrossBuild() error { + return mage.CrossBuild() +} + +// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. 
+func CrossBuildGoDaemon() error {
+	return mage.CrossBuildGoDaemon()
+}
diff --git a/dev-tools/mage/target/common/check.go b/dev-tools/mage/target/common/check.go
new file mode 100644
index 00000000000..35d348c51e7
--- /dev/null
+++ b/dev-tools/mage/target/common/check.go
@@ -0,0 +1,41 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package common
+
+import (
+	"github.com/magefile/mage/mg"
+
+	"github.com/elastic/beats/dev-tools/mage"
+)
+
+var checkDeps []interface{}
+
+// RegisterCheckDeps registers dependencies of the Check target.
+func RegisterCheckDeps(deps ...interface{}) {
+	checkDeps = append(checkDeps, deps...)
+}
+
+// Check formats code, updates generated content, checks for common errors, and
+// checks for any modified files.
+func Check() {
+	deps := make([]interface{}, 0, len(checkDeps)+2)
+	deps = append(deps, mage.Format)
+	deps = append(deps, checkDeps...)
+	deps = append(deps, mage.Check)
+	mg.SerialDeps(deps...)
+}
diff --git a/dev-tools/mage/target/common/clean.go b/dev-tools/mage/target/common/clean.go
new file mode 100644
index 00000000000..876722d8c62
--- /dev/null
+++ b/dev-tools/mage/target/common/clean.go
@@ -0,0 +1,25 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package common
+
+import "github.com/elastic/beats/dev-tools/mage"
+
+// Clean cleans all generated files and build artifacts.
+func Clean() error {
+	return mage.Clean()
+}
diff --git a/dev-tools/mage/target/common/fmt.go b/dev-tools/mage/target/common/fmt.go
new file mode 100644
index 00000000000..0068ab412b9
--- /dev/null
+++ b/dev-tools/mage/target/common/fmt.go
@@ -0,0 +1,29 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package common + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" +) + +// Fmt formats source code (.go and .py) and adds license headers. +func Fmt() { + mg.Deps(mage.Format) +} diff --git a/dev-tools/mage/target/common/shared.go b/dev-tools/mage/target/common/shared.go new file mode 100644 index 00000000000..7db2e4d5d5f --- /dev/null +++ b/dev-tools/mage/target/common/shared.go @@ -0,0 +1,25 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package common + +import "github.com/elastic/beats/dev-tools/mage" + +// DumpVariables writes the template variables and values to stdout. +func DumpVariables() error { + return mage.DumpVariables() +} diff --git a/dev-tools/mage/target/dashboards/dashboards.go b/dev-tools/mage/target/dashboards/dashboards.go new file mode 100644 index 00000000000..943d6a8aab1 --- /dev/null +++ b/dev-tools/mage/target/dashboards/dashboards.go @@ -0,0 +1,64 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package dashboards + +import ( + "github.com/magefile/mage/mg" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" +) + +var ( + buildDep interface{} + collectDashboardsDep interface{} +) + +// RegisterImportDeps registers dependencies of the Import target. +func RegisterImportDeps(build, collectDashboards interface{}) { + buildDep = build + collectDashboardsDep = collectDashboards +} + +// Dashboards target namespace. +type Dashboards mg.Namespace + +// Import imports dashboards to Kibana using the Beat setup command. 
+// +// Depends on: build, dashboard +// +// Optional environment variables: +// - KIBANA_URL: URL of Kibana +// - KIBANA_ALWAYS: Connect to Kibana without checking ES version. Default true. +// - ES_URL: URL of Elasticsearch (only used with KIBANA_ALWAYS=false). +func (Dashboards) Import() error { + if buildDep == nil || collectDashboardsDep == nil { + return errors.New("dashboard.RegisterImportDeps() must be called") + } + return mage.ImportDashboards(buildDep, collectDashboardsDep) +} + +// Export exports a dashboard from Kibana and writes it into the correct +// directory. +// +// Required environment variables: +// - MODULE: Name of the module +// - ID: Dashboard ID +func (Dashboards) Export() error { + return mage.ExportDashboard() +} diff --git a/dev-tools/mage/target/docs/docs.go b/dev-tools/mage/target/docs/docs.go new file mode 100644 index 00000000000..cc3f950637d --- /dev/null +++ b/dev-tools/mage/target/docs/docs.go @@ -0,0 +1,40 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package docs + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" +) + +var ( + docsDeps []interface{} +) + +// RegisterDeps registers dependencies of the Docs target. +func RegisterDeps(deps ...interface{}) { + docsDeps = append(docsDeps, deps...) +} + +// Docs generates the documentation for the Beat. Set PREVIEW=true to +// automatically open the browser to the docs. +func Docs() error { + mg.SerialDeps(docsDeps...) + return mage.Docs.AsciidocBook() +} diff --git a/dev-tools/mage/target/integtest/integtest.go b/dev-tools/mage/target/integtest/integtest.go new file mode 100644 index 00000000000..cc596f60b91 --- /dev/null +++ b/dev-tools/mage/target/integtest/integtest.go @@ -0,0 +1,82 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+
+package integtest
+
+import (
+	"context"
+
+	"github.com/magefile/mage/mg"
+
+	"github.com/elastic/beats/dev-tools/mage"
+	"github.com/elastic/beats/dev-tools/mage/target/test"
+)
+
+func init() {
+	test.RegisterDeps(IntegTest)
+}
+
+var (
+	goTestDeps, pythonTestDeps []interface{}
+	whitelistedEnvVars         []string
+)
+
+// RegisterGoTestDeps registers dependencies of the GoIntegTest target.
+func RegisterGoTestDeps(deps ...interface{}) {
+	goTestDeps = append(goTestDeps, deps...)
+}
+
+// RegisterPythonTestDeps registers dependencies of the PythonIntegTest target.
+func RegisterPythonTestDeps(deps ...interface{}) {
+	pythonTestDeps = append(pythonTestDeps, deps...)
+}
+
+// WhitelistEnvVar whitelists an environment variable to enable it to be
+// passed into the clean integration test environment (Docker).
+func WhitelistEnvVar(key ...string) {
+	whitelistedEnvVars = append(whitelistedEnvVars, key...)
+}
+
+// IntegTest executes integration tests (it uses Docker to run the tests).
+func IntegTest() {
+	mage.AddIntegTestUsage()
+	defer mage.StopIntegTestEnv()
+	mg.SerialDeps(GoIntegTest, PythonIntegTest)
+}
+
+// GoIntegTest executes the Go integration tests.
+// Use TEST_COVERAGE=true to enable code coverage profiling.
+// Use RACE_DETECTOR=true to enable the race detector.
+func GoIntegTest(ctx context.Context) error {
+	if !mage.IsInIntegTestEnv() {
+		mg.SerialDeps(goTestDeps...)
+	}
+	return mage.RunIntegTest("goIntegTest", func() error {
+		return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs())
+	}, whitelistedEnvVars...)
+}
+
+// PythonIntegTest executes the python system tests in the integration environment (Docker).
+func PythonIntegTest(ctx context.Context) error {
+	if !mage.IsInIntegTestEnv() {
+		mg.SerialDeps(pythonTestDeps...)
+	}
+	return mage.RunIntegTest("pythonIntegTest", func() error {
+		mg.Deps(mage.BuildSystemTestBinary)
+		return mage.PythonNoseTest(mage.DefaultPythonTestIntegrationArgs())
+	}, whitelistedEnvVars...)
+}
diff --git a/dev-tools/mage/target/pkg/test.go b/dev-tools/mage/target/pkg/test.go
new file mode 100644
index 00000000000..ded639fc5b3
--- /dev/null
+++ b/dev-tools/mage/target/pkg/test.go
@@ -0,0 +1,26 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package pkg
+
+import "github.com/elastic/beats/dev-tools/mage"
+
+// PackageTest tests the generated packages in build/distributions. It checks
+// things like file ownership/mode, package attributes, etc.
+func PackageTest() error {
+	return mage.TestPackages()
+}
diff --git a/dev-tools/mage/target/test/test.go b/dev-tools/mage/target/test/test.go
new file mode 100644
index 00000000000..cab213de22d
--- /dev/null
+++ b/dev-tools/mage/target/test/test.go
@@ -0,0 +1,35 @@
+// Licensed to Elasticsearch B.V.
under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package test + +import "github.com/magefile/mage/mg" + +var ( + testDeps []interface{} +) + +// RegisterDeps registers dependencies of the Test target (register your targets +// that execute tests). +func RegisterDeps(deps ...interface{}) { + testDeps = append(testDeps, deps...) +} + +// Test runs all available tests (unitTest + integTest). +func Test() { + mg.SerialDeps(testDeps...) +} diff --git a/dev-tools/mage/target/unittest/unittest.go b/dev-tools/mage/target/unittest/unittest.go new file mode 100644 index 00000000000..69e68300d2a --- /dev/null +++ b/dev-tools/mage/target/unittest/unittest.go @@ -0,0 +1,65 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package unittest + +import ( + "context" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/test" +) + +func init() { + test.RegisterDeps(UnitTest) +} + +var ( + goTestDeps, pythonTestDeps []interface{} +) + +// RegisterGoTestDeps registers dependencies of the GoUnitTest target. +func RegisterGoTestDeps(deps ...interface{}) { + goTestDeps = append(goTestDeps, deps...) +} + +// RegisterPythonTestDeps registers dependencies of the PythonUnitTest target. +func RegisterPythonTestDeps(deps ...interface{}) { + pythonTestDeps = append(pythonTestDeps, deps...) +} + +// UnitTest executes the unit tests (Go and Python). +func UnitTest() { + mg.SerialDeps(GoUnitTest, PythonUnitTest) +} + +// GoUnitTest executes the Go unit tests. +// Use TEST_COVERAGE=true to enable code coverage profiling. +// Use RACE_DETECTOR=true to enable the race detector. +func GoUnitTest(ctx context.Context) error { + mg.SerialCtxDeps(ctx, goTestDeps...) + return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) +} + +// PythonUnitTest executes the python system tests. +func PythonUnitTest() error { + mg.SerialDeps(pythonTestDeps...) 
+ mg.Deps(mage.BuildSystemTestBinary) + return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) +} diff --git a/dev-tools/make/gox.mk b/dev-tools/make/gox.mk new file mode 100644 index 00000000000..91b6a7c19bf --- /dev/null +++ b/dev-tools/make/gox.mk @@ -0,0 +1,30 @@ +# +# gox is a tool to cross-compile Go binaries. cgo is not used when compiling. +# This is quick smoke test to ensure that nothing is broken at compile time by +# the introduction of CGO-only code. +# + +# +# Variables +# +GOX_OS ?= linux darwin windows freebsd netbsd openbsd +GOX_OSARCH ?= !darwin/arm !darwin/arm64 !darwin/386 +GOX_FLAGS ?= +GOX_DISABLE ?= +GOX_IMPORT_PATH ?= github.com/mitchellh/gox +GOX_PRESENT := $(shell command -v gox 2> /dev/null) + +# +# Targets +# +.PHONY: gox +gox: +ifndef GOX_DISABLE +ifndef GOX_PRESENT + go get -u $(GOX_IMPORT_PATH) +endif + mkdir -p build/gox + gox -output="build/gox/{{.Dir}}-{{.OS}}-{{.Arch}}" -os="$(strip $(GOX_OS))" -osarch="$(strip $(GOX_OSARCH))" ${GOX_FLAGS} +else + @echo gox target is disabled. +endif diff --git a/dev-tools/make/mage.mk b/dev-tools/make/mage.mk index b27ee64af6a..0214f600ffb 100644 --- a/dev-tools/make/mage.mk +++ b/dev-tools/make/mage.mk @@ -1,8 +1,18 @@ +# +# Mage (https://magefile.org) is the main build tool used. +# + +# +# Variables +# MAGE_VERSION ?= v1.8.0 MAGE_PRESENT := $(shell mage --version 2> /dev/null | grep $(MAGE_VERSION)) MAGE_IMPORT_PATH ?= github.com/elastic/beats/vendor/github.com/magefile/mage export MAGE_IMPORT_PATH +# +# Targets +# .PHONY: mage mage: ifndef MAGE_PRESENT diff --git a/dev-tools/make/mage_wrapper.mk b/dev-tools/make/mage_wrapper.mk new file mode 100644 index 00000000000..e1565d31cac --- /dev/null +++ b/dev-tools/make/mage_wrapper.mk @@ -0,0 +1,50 @@ +# This is a minimal Makefile for Beats that are built with Mage. Its only +# responsibility is to provide compatibility with existing Jenkins and Travis +# setups. + +# +# Variables +# +.DEFAULT_GOAL := help +PWD := $(CURDIR) + +# +# Includes +# +include $(ES_BEATS)/dev-tools/make/mage.mk + +# +# Targets (alphabetically sorted). +# + +.PHONY: check +check: mage + mage check + +.PHONY: clean +clean: mage + mage clean + +.PHONY: fmt +fmt: mage + mage fmt + +# Default target. +.PHONY: help +help: + @echo Use mage rather than make. Here are the available mage targets: + @mage -l + +.PHONY: release +release: mage + mage package + +.PHONY: testsuite +testsuite: mage + @rm -f build/TEST-go-integration.out + mage update build unitTest integTest || ( cat build/TEST-go-integration.out && false ) + +.PHONY: update +update: mage + mage update + diff --git a/dev-tools/make/misspell.mk b/dev-tools/make/misspell.mk new file mode 100644 index 00000000000..400dd5eabe6 --- /dev/null +++ b/dev-tools/make/misspell.mk @@ -0,0 +1,27 @@ +# +# misspell is a tool that corrects common misspellings found in files. +# + +# +# Variables +# +MISSPELL_IMPORT_PATH ?= github.com/client9/misspell/cmd/misspell +MISSPELL_PRESENT := $(shell command -v misspell 2> /dev/null) +MISSPELL_FIND ?= find . -type f \ + -not -path "*/vendor/*" \ + -not -path "*/build/*" \ + -not -path "*/.git/*" \ + -not -path "*.json" \ + -not -path "*.log" \ + -name '*' + +# +# Targets +# + +.PHONY: misspell +misspell: +ifndef MISSPELL_PRESENT + go get -u $(MISSPELL_IMPORT_PATH) +endif + $(MISSPELL_FIND) -exec misspell -w {} \; diff --git a/dev-tools/make/oss.mk b/dev-tools/make/oss.mk new file mode 100644 index 00000000000..a64d9ef3ffa --- /dev/null +++ b/dev-tools/make/oss.mk @@ -0,0 +1,12 @@ +# +# Variables +# +ES_BEATS ?= .. 
+PYTHON_ENV ?= $(ES_BEATS) + +# +# Includes +# +include $(ES_BEATS)/dev-tools/make/mage_wrapper.mk +include $(ES_BEATS)/dev-tools/make/misspell.mk +include $(ES_BEATS)/dev-tools/make/gox.mk diff --git a/dev-tools/make/python.mk b/dev-tools/make/python.mk new file mode 100644 index 00000000000..a37746303cb --- /dev/null +++ b/dev-tools/make/python.mk @@ -0,0 +1,17 @@ +# +# Variables +# +PYTHON_ENV ?= . +PYTHON_VE_DIR ?= $(PYTHON_ENV)/build/ve/$(shell uname -s | tr A-Z a-z) +PYTHON_VE_REQS ?= $(ES_BEATS)/libbeat/tests/system/requirements.txt + +# +# Targets +# + +# Create a Python virtualenv. All Beats share the same virtual environment. +python-env: $(PYTHON_VE_DIR)/bin/activate +$(PYTHON_VE_DIR)/bin/activate: $(ES_BEATS)/libbeat/tests/system/requirements.txt + @test -d $(PYTHON_VE_DIR) || virtualenv $(PYTHON_VE_DIR) + @$(PYTHON_VE_DIR)/bin/pip install -Ur $(PYTHON_VE_REQS) + @touch $(PYTHON_VE_DIR)/bin/activate diff --git a/dev-tools/make/reviewdog.mk b/dev-tools/make/reviewdog.mk new file mode 100644 index 00000000000..5e3c0d2c145 --- /dev/null +++ b/dev-tools/make/reviewdog.mk @@ -0,0 +1,32 @@ +# +# Variables +# +REVIEWDOG_BRANCH ?= master +REVIEWDOG_OPTIONS ?= -diff "git diff $(REVIEWDOG_BRANCH)" +REVIEWDOG_CMD ?= reviewdog +REVIEWDOG_IMPORT_PATH ?= github.com/haya14busa/reviewdog/cmd/reviewdog +REVIEWDOG_PRESENT := $(shell command -v reviewdog 2> /dev/null) + +GOLINT_CMD ?= golint +GOLINT_IMPORT_PATH ?= github.com/golang/lint/golint +GOLINT_PRESENT := $(shell command -v golint 2> /dev/null) + +# +# Targets +# + +# reviewdog diffs the golint warnings between the current branch and the +# REVIEWDOG_BRANCH (defaults to master). +.PHONY: reviewdog +reviewdog: +ifndef REVIEWDOG_PRESENT + @go get $(REVIEWDOG_IMPORT_PATH) +endif +ifndef GOLINT_PRESENT + @go get $(GOLINT_IMPORT_PATH) +endif + $(REVIEWDOG_CMD) $(REVIEWDOG_OPTIONS) + +# lint is an alias for reviewdog. +.PHONY: lint +lint: reviewdog diff --git a/dev-tools/make/xpack.mk b/dev-tools/make/xpack.mk index cb49f48c40f..1c1a6ef2f53 100644 --- a/dev-tools/make/xpack.mk +++ b/dev-tools/make/xpack.mk @@ -1,50 +1,12 @@ -# This is a minimal Makefile for Beats that are built with Mage. Its only -# responsibility is to provide compatibility with existing Jenkins and Travis -# setups. - # # Variables # -.DEFAULT_GOAL := help -PWD := $(CURDIR) +ES_BEATS ?= ../.. +PYTHON_ENV ?= $(ES_BEATS) # # Includes # -include $(ES_BEATS)/dev-tools/make/mage.mk - -# -# Targets (alphabetically sorted). -# -.PHONY: check -check: mage - mage check - -.PHONY: clean -clean: mage - mage clean - -.PHONY: fmt -fmt: mage - mage fmt - -# Default target. -.PHONY: help -help: - @echo Use mage rather than make. 
Here are the available mage targets: - @mage -l - -.PHONY: release -release: mage - mage package - -.PHONY: testsuite -testsuite: mage - -rm build/TEST-go-integration.out - mage update build unitTest integTest || ( cat build/TEST-go-integration.out && false ) - - -.PHONY: update -update: mage - mage update - +include $(ES_BEATS)/dev-tools/make/mage_wrapper.mk +include $(ES_BEATS)/dev-tools/make/misspell.mk +include $(ES_BEATS)/dev-tools/make/gox.mk diff --git a/dev-tools/packaging/templates/windows/install-service.ps1.tmpl b/dev-tools/packaging/templates/windows/install-service.ps1.tmpl index 4aecfc56cb8..02be77b3bb0 100644 --- a/dev-tools/packaging/templates/windows/install-service.ps1.tmpl +++ b/dev-tools/packaging/templates/windows/install-service.ps1.tmpl @@ -17,4 +17,4 @@ New-Service -name {{.BeatName}} ` Try { Start-Process -FilePath sc.exe -ArgumentList 'config {{.BeatName}} start=delayed-auto' } -Catch { Write-Host "An error occured setting the service to delayed start." -ForegroundColor Red } +Catch { Write-Host "An error occurred setting the service to delayed start." -ForegroundColor Red } diff --git a/docs/magefile.go b/docs/magefile.go new file mode 100644 index 00000000000..f31bb15f6ad --- /dev/null +++ b/docs/magefile.go @@ -0,0 +1,37 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// +build mage + +package main + +import "github.com/elastic/beats/dev-tools/mage" + +// Docs generates the dev-guide documentation. +func Docs() error { + return mage.Docs.AsciidocBook( + mage.DocsName("dev-guide"), + mage.DocsIndexFile("devguide/index.asciidoc"), + ) +} + +// Clean cleans the generated documentation. +func Clean() error { + return mage.Clean([]string{ + "build", + }) +} diff --git a/libbeat/scripts/README.md b/libbeat/scripts/README.md index 91409163439..ec0a40c9439 100644 --- a/libbeat/scripts/README.md +++ b/libbeat/scripts/README.md @@ -4,8 +4,6 @@ Below is a brief description of each file / folder. | File / Folder | Description | |----------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------| -| docker-entrypoint.sh | Entrypoint file used for the Dockerfile | | Makefile | General Makefile which is copied over to all beats. 
This contains the basic methods which are shared across all beat | -| install-go.ps1 | PowerShell script for automating the install of Go on Windows.| diff --git a/libbeat/scripts/create_packer.py b/libbeat/scripts/create_packer.py deleted file mode 100644 index 58f7a581a8e..00000000000 --- a/libbeat/scripts/create_packer.py +++ /dev/null @@ -1,69 +0,0 @@ -import os -import argparse - -# Adds dev-tools/packer directory with the necessary files to a beat - - -def generate_packer(es_beats, abs_path, beat, beat_path, version): - - # create dev-tools/packer - packer_path = abs_path + "/dev-tools/packer" - - print(packer_path) - - if os.path.isdir(packer_path): - print("Dev tools already exists. Stopping...") - return - - # create all directories needed - os.makedirs(packer_path + "/beats") - - templates = es_beats + "/libbeat/scripts/dev-tools/packer" - - content = load_file(templates + "/version.yml", beat, beat_path, version) - with open(packer_path + "/version.yml", "w") as f: - f.write(content) - - content = load_file(templates + "/Makefile", beat, beat_path, version) - with open(packer_path + "/Makefile", "w") as f: - f.write(content) - - content = load_file(templates + "/config.yml", beat, beat_path, version) - with open(packer_path + "/beats/" + beat + ".yml", "w") as f: - f.write(content) - - print("Packer directories created") - - -def load_file(file, beat, beat_path, version): - content = "" - with open(file) as f: - content = f.read() - - return content.replace("{beat}", beat).replace("{beat_path}", beat_path).replace("{version}", version) - - -if __name__ == "__main__": - - parser = argparse.ArgumentParser(description="Creates the beats packer structure") - parser.add_argument("--beat", help="Beat name", default="test") - parser.add_argument("--beat_path", help="Beat path", default="./") - parser.add_argument("--es_beats", help="Beat path", default="../") - parser.add_argument("--version", help="Beat version", default="0.1.0") - - args = parser.parse_args() - - # Fetches GOPATH and current execution directory. It is expected to run this script from the Makefile. - gopath = os.environ['GOPATH'].split(os.pathsep)[0] - # Normalise go path - gopath = os.path.abspath(gopath) - abs_path = os.path.abspath("./") - - # Removes the gopath + /src/ from the directory name to fetch the path - beat_path = abs_path[len(gopath) + 5:] - - print(beat_path) - print(abs_path) - - es_beats = os.path.abspath(args.es_beats) - generate_packer(es_beats, abs_path, args.beat, beat_path, args.version) diff --git a/libbeat/scripts/wait_for.sh b/libbeat/scripts/wait_for.sh deleted file mode 100644 index bbe23181af0..00000000000 --- a/libbeat/scripts/wait_for.sh +++ /dev/null @@ -1,22 +0,0 @@ -#!/bin/bash -set -e - -# Wait for. Params: host, port, service -waitFor() { - echo -n "Waiting for ${3}(${1}:${2}) to start." - for ((i=1; i<=90; i++)) do - if nc -vz ${1} ${2} 2>/dev/null; then - echo - echo "${3} is ready!" - return 0 - fi - - ((i++)) - echo -n '.' - sleep 1 - done - - echo - echo >&2 "${3} is not available" - echo >&2 "Address: ${1}:${2}" -} diff --git a/magefile.go b/magefile.go index ce661e461bc..9ad1238d244 100644 --- a/magefile.go +++ b/magefile.go @@ -20,8 +20,19 @@ package main import ( + "bufio" + "bytes" "fmt" + "math" + "os" "path/filepath" + "sort" + "strconv" + "strings" + "text/template" + "time" + + "github.com/pkg/errors" "github.com/magefile/mage/mg" "github.com/magefile/mage/sh" @@ -31,50 +42,117 @@ import ( ) var ( - // Beats is a list of Beats to collect dashboards from. 
- Beats = []string{ - "auditbeat", - "filebeat", - "heartbeat", - "journalbeat", - "metricbeat", - "packetbeat", - "winlogbeat", - "x-pack/functionbeat", + projects = projectList{ + {"libbeat", build | fields | docs | unitTest | integTest | linuxCI | macosCI}, + {"auditbeat", build | fields | update | docs | packaging | unitTest | integTest | linuxCI | macosCI}, + {"filebeat", build | fields | update | docs | packaging | unitTest | integTest | linuxCI | macosCI}, + {"heartbeat", build | fields | update | docs | packaging | dashboards | unitTest | integTest | linuxCI | macosCI}, + {"journalbeat", build | fields | update | docs | packaging | dashboards | integTest | linuxCI}, + {"metricbeat", build | fields | update | docs | packaging | dashboards | unitTest | integTest | linuxCI | macosCI}, + {"packetbeat", build | fields | update | docs | packaging | dashboards | unitTest | linuxCI | macosCI}, + {"winlogbeat", build | fields | update | docs | packaging | dashboards | unitTest | linuxCI}, + {"x-pack/libbeat", build | unitTest | linuxCI}, + {"x-pack/auditbeat", build | fields | update | packaging | dashboards | unitTest | integTest | linuxCI | macosCI}, + {"x-pack/filebeat", build | fields | update | packaging | dashboards | unitTest | integTest | linuxCI | macosCI}, + {"x-pack/functionbeat", build | fields | update | packaging | dashboards | unitTest | integTest | linuxCI}, + {"x-pack/heartbeat", build | fields | update | packaging | linuxCI}, + {"x-pack/journalbeat", build | fields | update | packaging | linuxCI}, + {"x-pack/metricbeat", build | fields | update | packaging | update | linuxCI}, + {"x-pack/packetbeat", build | fields | update | packaging | linuxCI}, + {"x-pack/winlogbeat", build | fields | update | packaging | linuxCI}, + {"dev-tools/packaging/preference-pane", build | macosCI}, + {"deploy/kubernetes", update}, + {"docs", docs}, + + // TODO: Add generators. + } + + Aliases = map[string]interface{}{ + "check": Check.All, + "fmt": Check.Fmt, + "package": Package.All, + "test": Test.All, + "update": Update.All, + "vet": Check.Vet, } ) -// PackageBeatDashboards packages the dashboards from all Beats into a zip -// file. The dashboards must be generated first. 
-func PackageBeatDashboards() error { - version, err := mage.BeatQualifiedVersion() - if err != nil { - return err +type project struct { + Dir string + Attrs attribute +} + +func (p project) HasAttribute(a attribute) bool { + return p.Attrs&a > 0 +} + +type attribute uint16 + +const ( + none attribute = 0 + build attribute = 1 << iota + update + dashboards + docs + fields + packaging + unitTest + integTest + + linuxCI + macosCI + + any attribute = math.MaxUint16 +) + +type projectList []project + +func (l projectList) ForEach(attr attribute, f func(proj project) error) error { + for _, proj := range l { + if proj.Attrs&attr > 0 { + if err := f(proj); err != nil { + return err + } + } } + return nil +} - spec := mage.PackageSpec{ - Name: "beats-dashboards", - Version: version, - Snapshot: mage.Snapshot, - Files: map[string]mage.PackageFile{ - ".build_hash.txt": mage.PackageFile{ - Content: "{{ commit }}\n", - }, - }, - OutputFile: "build/distributions/dashboards/{{.Name}}-{{.Version}}{{if .Snapshot}}-SNAPSHOT{{end}}", +// --- Targets --- + +func Clean() error { + paths := []string{ + "build", + "docs/build", + "generator/beat/build", + "generator/metricbeat/build", } - for _, beat := range Beats { - spec.Files[beat] = mage.PackageFile{ - Source: filepath.Join(beat, "_meta/kibana.generated"), + _ = projects.ForEach(any, func(proj project) error { + if strings.HasSuffix(filepath.Base(proj.Dir), "beat") { + beatName := filepath.Base(proj.Dir) + for _, path := range mage.DefaultCleanPaths { + path = mage.MustExpand(path, map[string]interface{}{ + "BeatName": beatName, + }) + paths = append(paths, filepath.Join(proj.Dir, path)) + } } - } + return nil + }) - return mage.PackageZip(spec.Evaluate()) + return mage.Clean(paths) +} + +type Check mg.Namespace + +// Check checks that code is formatted and generated files are up-to-date. +func (Check) All() { + mg.SerialDeps(Check.Fmt, Check.Targets, Update.All, mage.Check) } // Fmt formats code and adds license headers. -func Fmt() { +func (Check) Fmt() { mg.Deps(mage.GoImports, mage.PythonAutopep8) mg.Deps(addLicenseHeaders) } @@ -94,7 +172,372 @@ func addLicenseHeaders() error { ) } +func (Check) Vet() error { + return mage.GoVet() +} + +var commonBeatTargets = []string{ + "check", + "clean", + "dumpVariables", + "fmt", + "build", + "buildGoDaemon", + "crossBuild", + "crossBuildGoDaemon", + "crossBuildGoDaemon", + "golangCrossBuild", + "update:fields", +} + +func (Check) Targets() error { + mageCmd := sh.OutCmd("mage", "-d") + var errs []error + err := projects.ForEach(any, func(proj project) error { + fmt.Println("> check:targets:", proj.Dir) + out, err := mageCmd(proj.Dir, "-l") + if err != nil { + return errors.Wrapf(err, "failed checking mage targets of project %v", proj.Dir) + } + targets, err := parseTargets(out) + if err != nil { + return errors.Wrapf(err, "failed parsing mage -l output of project %v", proj.Dir) + } + + var expectedTargets []string + if strings.HasSuffix(proj.Dir, "beat") { + // Build list of expected targets based on attributes. 
+ expectedTargets = make([]string, len(commonBeatTargets)) + copy(expectedTargets, commonBeatTargets) + } + if proj.HasAttribute(build) { + expectedTargets = append(expectedTargets, "build") + } + if proj.HasAttribute(fields) { + expectedTargets = append(expectedTargets, "update:fields") + } + if proj.HasAttribute(update) { + expectedTargets = append(expectedTargets, "update") + } + if proj.HasAttribute(dashboards) { + expectedTargets = append(expectedTargets, "update:dashboards", "dashboards:import", "dashboards:export") + } + if proj.HasAttribute(docs) { + expectedTargets = append(expectedTargets, "docs") + } + if proj.HasAttribute(packaging) { + expectedTargets = append(expectedTargets, "package", "packageTest") + } + if proj.HasAttribute(unitTest) { + expectedTargets = append(expectedTargets, "unitTest") + } + if proj.HasAttribute(integTest) { + expectedTargets = append(expectedTargets, "integTest") + } + + // Check for missing targets. + var missing []string + for _, target := range expectedTargets { + if _, found := targets[target]; !found { + missing = append(missing, target) + } + } + if len(missing) > 0 { + sort.Strings(missing) + err = errors.Errorf("failed checking mage targets of project "+ + "%v: missing [%v]", proj.Dir, strings.Join(missing, ", ")) + errs = append(errs, err) + } + // Check for missing descriptions. + var badDescription []string + for target, desc := range targets { + desc := strings.TrimSpace(desc) + if desc == "" || !strings.HasSuffix(desc, ".") { + badDescription = append(badDescription, target) + } + } + if len(badDescription) > 0 { + sort.Strings(badDescription) + err = errors.Errorf("failed checking mage targets of project "+ + "%v: no descriptions or missing period for [%v]", proj.Dir, strings.Join(badDescription, ", ")) + errs = append(errs, err) + } + return nil + }) + if err != nil { + return err + } + + return multierr.Combine(errs...) +} + +func parseTargets(rawOutput string) (map[string]string, error) { + targets := map[string]string{} + s := bufio.NewScanner(bytes.NewBufferString(rawOutput)) + for s.Scan() { + line := s.Text() + if line == "Targets:" || strings.HasPrefix(line, "*") { + continue + } + if parts := strings.Fields(line); len(parts) > 0 { + targets[parts[0]] = strings.Join(parts[1:], " ") + } + } + return targets, s.Err() +} + +func Docs() error { + return projects.ForEach(docs, func(proj project) error { + fmt.Println("> docs:", proj.Dir) + return mage.Mage(proj.Dir, "docs") + }) +} + // DumpVariables writes the template variables and values to stdout. func DumpVariables() error { return mage.DumpVariables() } + +type Update mg.Namespace + +// All updates all Beats. +func (Update) All() error { + mg.Deps(Update.Notice, Update.TravisCI) + return projects.ForEach(update, func(proj project) error { + fmt.Println("> update:all:", proj.Dir) + return errors.Wrapf(mage.Mage(proj.Dir, "update"), "failed updating project %v", proj.Dir) + }) +} + +// Fields updates the fields for each Beat. +func (Update) Fields() error { + return projects.ForEach(fields, func(proj project) error { + fmt.Println("> update:fields:", proj.Dir) + return errors.Wrapf(mage.Mage(proj.Dir, "fields"), "failed updating project %v", proj.Dir) + }) +} + +// Dashboards updates the dashboards for each Beat. 
+func (Update) Dashboards() error { + return projects.ForEach(dashboards, func(proj project) error { + fmt.Println("> update:dashboards:", proj.Dir) + return errors.Wrapf(mage.Mage(proj.Dir, "update:dashboards"), "failed updating project %v", proj.Dir) + }) +} + +func (Update) Notice() error { + ve, err := mage.PythonVirtualenv() + if err != nil { + return err + } + pythonPath, err := mage.LookVirtualenvPath(ve, "python") + if err != nil { + return err + } + return sh.RunV(pythonPath, filepath.Clean("dev-tools/generate_notice.py"), ".") +} + +func (Update) TravisCI() error { + var data TravisCITemplateData + + // Check + data.Jobs = append(data.Jobs, TravisCIJob{ + OS: "linux", + Stage: "check", + Env: []string{ + "BUILD_CMD=" + strconv.Quote("mage"), + "TARGETS=" + strconv.Quote("check"), + }, + }) + + _ = projects.ForEach(any, func(proj project) error { + if proj.HasAttribute(linuxCI) && (proj.HasAttribute(unitTest) || proj.HasAttribute(integTest)) { + var targets []string + if proj.HasAttribute(unitTest) { + targets = append(targets, "unitTest") + } + if proj.HasAttribute(integTest) { + targets = append(targets, "integTest") + } + data.Jobs = append(data.Jobs, TravisCIJob{ + OS: "linux", + Stage: "test", + Env: []string{ + "BUILD_CMD=" + strconv.Quote("mage -d "+filepath.ToSlash(proj.Dir)), + "TARGETS=" + strconv.Quote(strings.Join(targets, " ")), + }, + }) + } + + // We don't run the integTest on OSX because they require Docker. + if proj.HasAttribute(macosCI) && proj.HasAttribute(unitTest) { + data.Jobs = append(data.Jobs, TravisCIJob{ + OS: "osx", + Stage: "test", + Env: []string{ + "BUILD_CMD=" + strconv.Quote("mage -d "+filepath.ToSlash(proj.Dir)), + "TARGETS=" + strconv.Quote("unitTest"), + }, + }) + } + return nil + }) + + // Docs + data.Jobs = append(data.Jobs, TravisCIJob{ + OS: "linux", + Stage: "test", + Env: []string{ + "BUILD_CMD=" + strconv.Quote("mage"), + "TARGETS=" + strconv.Quote("docs"), + }, + }) + + _ = projects.ForEach(any, func(proj project) error { + if !strings.HasSuffix(filepath.Base(proj.Dir), "beat") { + return nil + } + + data.Jobs = append(data.Jobs, TravisCIJob{ + OS: "linux", + Stage: "crosscompile", + Env: []string{ + "BUILD_CMD=" + strconv.Quote("make -C "+proj.Dir), + "TARGETS=" + strconv.Quote("gox"), + }, + }) + return nil + }) + + elasticBeats, err := mage.ElasticBeatsDir() + if err != nil { + return err + } + + t, err := template.ParseFiles(filepath.Join(elasticBeats, "dev-tools/ci/templates/travis.yml.tmpl")) + if err != nil { + return err + } + + out, err := os.OpenFile(".travis.yml", os.O_TRUNC|os.O_CREATE|os.O_WRONLY, 0755) + if err != nil { + return err + } + defer out.Close() + + return t.Execute(out, data) +} + +type TravisCITemplateData struct { + Jobs []TravisCIJob +} + +type TravisCIJob struct { + OS string + Env []string + Stage string +} + +type Package mg.Namespace + +// All packages all Beats and generates the dashboards zip package. +func (Package) All() { + mg.SerialDeps(Package.Dashboards, Package.Beats) +} + +// Dashboards packages the dashboards from all Beats into a zip file. 
+func (Package) Dashboards() error { + mg.Deps(Update.Dashboards) + + version, err := mage.BeatQualifiedVersion() + if err != nil { + return err + } + + spec := mage.PackageSpec{ + Name: "beats-dashboards", + Version: version, + Snapshot: mage.Snapshot, + Files: map[string]mage.PackageFile{ + ".build_hash.txt": mage.PackageFile{ + Content: "{{ commit }}\n", + }, + }, + OutputFile: "build/distributions/dashboards/{{.Name}}-{{.Version}}{{if .Snapshot}}-SNAPSHOT{{end}}", + } + + _ = projects.ForEach(dashboards, func(proj project) error { + beat := filepath.Base(proj.Dir) + spec.Files[beat] = mage.PackageFile{ + Source: filepath.Join(proj.Dir, "build/kibana"), + } + return nil + }) + + return mage.PackageZip(spec.Evaluate()) +} + +// Beats packages each Beat. +// +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func (Package) Beats() (err error) { + return projects.ForEach(packaging, func(proj project) error { + fmt.Println("> package:beats:", proj.Dir) + if err := mage.Mage(proj.Dir, "package"); err != nil { + return errors.Wrapf(err, "failed packaging project %v", proj.Dir) + } + + // Copy files to build/distributions. + const distDir = "build/distributions" + if err = os.MkdirAll(distDir, 0755); err != nil { + return err + } + files, err := mage.FindFiles(filepath.Join(proj.Dir, distDir, "*")) + if err != nil { + return err + } + for _, f := range files { + if err = os.Rename(f, filepath.Join(distDir, filepath.Base(f))); err != nil { + return errors.Wrap(err, "failed moving packages to top-level build dir") + } + } + return nil + }) +} + +type Test mg.Namespace + +func (Test) All() error { + start := time.Now() + defer func() { fmt.Println("test:all ran for", time.Since(start)) }() + + return projects.ForEach(any, func(proj project) error { + fmt.Println("> test:all:", proj.Dir) + if !proj.HasAttribute(unitTest) && !proj.HasAttribute(integTest) { + return nil + } + return errors.Wrapf(mage.Mage(proj.Dir, "test"), "failed testing project %v", proj.Dir) + }) +} + +func (Test) Unit() error { + start := time.Now() + defer func() { fmt.Println("test:unit ran for", time.Since(start)) }() + + return projects.ForEach(unitTest, func(proj project) error { + fmt.Println("> test:unit:", proj.Dir) + return errors.Wrapf(mage.Mage(proj.Dir, "unitTest"), "failed testing project %v", proj.Dir) + }) +} + +func (Test) Integ() error { + start := time.Now() + defer func() { fmt.Println("test:integ ran for", time.Since(start)) }() + + return projects.ForEach(integTest, func(proj project) error { + fmt.Println("> test:integ:", proj.Dir) + return errors.Wrapf(mage.Mage(proj.Dir, "integTest"), "failed testing project %v", proj.Dir) + }) +} diff --git a/reviewdog.yml b/reviewdog.yml index f9590aa39d4..efc2e7f6d88 100644 --- a/reviewdog.yml +++ b/reviewdog.yml @@ -1,3 +1,3 @@ runner: golint: - cmd: golint $(go list ./... | grep -v /vendor/) | grep -v "don't use an underscore in package name" + cmd: golint $(go list ./...) | grep -v "don't use an underscore in package name" diff --git a/script/build_docs.sh b/script/build_docs.sh deleted file mode 100755 index d53f570d939..00000000000 --- a/script/build_docs.sh +++ /dev/null @@ -1,35 +0,0 @@ -#!/usr/bin/env bash - -set -e - -name=$1 -path=$2 -build_dir=$3 - -docs_dir=$build_dir/docs -html_dir=$build_dir/html_docs - -# Checks if docs clone already exists -if [ ! 
-d $docs_dir ]; then - # Only head is cloned - git clone --depth=1 https://github.com/elastic/docs.git $docs_dir -else - echo "$docs_dir already exists. Not cloning." -fi - -index_list="$(find ${GOPATH%%:*}/src/$path -name 'index.asciidoc' -maxdepth 1)" -for index in $index_list -do - echo "Building docs for ${name}..." - echo "Index document: ${index}" - index_path=$(basename $(dirname $index)) - echo "Index path: $index_path" - - dest_dir="$html_dir/${name}/${index_path}" - mkdir -p "$dest_dir" - params="--chunk=1" - if [ "$PREVIEW" = "1" ]; then - params="--chunk=1 -open chunk=1 -open" - fi - $docs_dir/build_docs.pl $params --doc "$index" -out "$dest_dir" -done diff --git a/script/config_collector.py b/script/config_collector.py deleted file mode 100644 index 73e18ae7062..00000000000 --- a/script/config_collector.py +++ /dev/null @@ -1,95 +0,0 @@ -import os -import argparse -import yaml - -# Collects config for all modules - - -def collect(beat_name, beat_path, full=False): - - base_dir = beat_path + "/module" - path = os.path.abspath(base_dir) - - # yml file - - config_yml = "\n#========================== Modules configuration ============================\n" - config_yml += beat_name + """.modules: - -""" - - # Read the modules list but put "system" first - modules = ["system"] - for module in sorted(os.listdir(base_dir)): - if module != "system": - modules.append(module) - - # Iterate over all modules - for module in modules: - - beat_path = path + "/" + module + "/_meta" - - module_configs = beat_path + "/config.yml" - - # By default, short config is read if short is set - short_config = False - - # Check if full config exists - if full: - full_module_config = beat_path + "/config.reference.yml" - if os.path.isfile(full_module_config): - module_configs = full_module_config - - # Only check folders where config exists - if not os.path.isfile(module_configs): - continue - - # Load title from fields.yml - with open(beat_path + "/fields.yml") as f: - fields = yaml.load(f.read()) - title = fields[0]["title"] - - # Check if short config was disabled in fields.yml - if not full and "short_config" in fields[0]: - short_config = fields[0]["short_config"] - - if not full and short_config is False: - continue - - config_yml += get_title_line(title) - - # Load module yaml - with open(module_configs) as f: - for line in f: - config_yml += line - - config_yml += "\n" - # output string so it can be concatenated - print(config_yml) - - -# Makes sure every title line is 79 + newline chars long -def get_title_line(title): - dashes = (79 - 10 - len(title)) // 2 - - line = "#" - line += "-" * dashes - line += " " + title + " Module " - line += "-" * dashes - - return line[0:78] + "\n" - - -if __name__ == "__main__": - - parser = argparse.ArgumentParser( - description="Collects modules config") - parser.add_argument("path", help="Path to the beat folder") - parser.add_argument("--beat", help="Beat name") - parser.add_argument("--full", action="store_true", - help="Collect the full versions") - - args = parser.parse_args() - beat_name = args.beat - beat_path = args.path - - collect(beat_name, beat_path, args.full) diff --git a/script/generate_imports.py b/script/generate_imports.py deleted file mode 100644 index 56bf8644b1e..00000000000 --- a/script/generate_imports.py +++ /dev/null @@ -1,58 +0,0 @@ -import sys -from os import listdir, getcwd -from os.path import abspath, isdir, join, dirname, basename -from argparse import ArgumentParser - -sys.path.append(abspath("scripts")) -from 
generate_imports_helper import comment, get_importable_lines - - -import_line_format = "\t_ \"{beat_path}/{module}/{name}\"" -import_template = """// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// Code generated by 'make imports' - DO NOT EDIT. - -/* -{comment} -*/ -package {package} - -import ( -{imports} -) -""" - - -def generate_and_write_to_file(outfile, go_beat_path): - imported_beat_lines = get_importable_lines(go_beat_path, import_line_format) - imported_lines = "\n".join(imported_beat_lines) - package = basename(dirname(outfile)) - list_go = import_template.format(package=package, - comment=comment, - imports=imported_lines) - with open(outfile, "w") as output: - output.write(list_go) - - -if __name__ == "__main__": - parser = ArgumentParser(description="Generate imports for Beats packages") - parser.add_argument("--out", default="include/list.go") - parser.add_argument("beats_path") - args = parser.parse_args() - - generate_and_write_to_file(args.out, args.beats_path) diff --git a/script/modules_collector.py b/script/modules_collector.py deleted file mode 100644 index 51b38980666..00000000000 --- a/script/modules_collector.py +++ /dev/null @@ -1,61 +0,0 @@ -import os -import argparse -import yaml -import six -import glob -import re - -# Collects module configs to modules.d - -REFERENCE_CONFIG_RE = re.compile('.+\.reference\.yml') - - -def collect(beat_name, docs_branch): - - base_dir = "module" - path = os.path.abspath("module") - - # TODO add module release status if beta or experimental - header = """# Module: {module} -# Docs: https://www.elastic.co/guide/en/beats/{beat_name}/{docs_branch}/{beat_name}-module-{module}.html - -""" - - # Create directory for module confs - os.mkdir(os.path.abspath('modules.d')) - - # Iterate over all modules - for module in sorted(os.listdir(base_dir)): - - module_confs = path + '/' + module + '/_meta/config*.yml' - for module_conf in glob.glob(module_confs): - - # Ignore reference confs - if REFERENCE_CONFIG_RE.match(module_conf): - continue - - if os.path.isfile(module_conf) == False: - continue - - module_file = header.format(module=module, beat_name=beat_name, docs_branch=docs_branch) - disabled_config_filename = os.path.basename(module_conf).replace('config', module) + '.disabled' - - with open(module_conf) as f: - module_file += f.read() - - # Write disabled module conf - with open(os.path.abspath('modules.d/') + '/' + disabled_config_filename, 'w') as f: - f.write(module_file) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Collects modules confs") - parser.add_argument("--beat", help="Beat name") - parser.add_argument("--docs_branch", help="Docs branch") - - args = parser.parse_args() - beat_name = args.beat - docs_branch = args.docs_branch - - 
collect(beat_name, docs_branch) diff --git a/setup.yml b/setup.yml deleted file mode 100644 index 168251f0630..00000000000 --- a/setup.yml +++ /dev/null @@ -1 +0,0 @@ -#======================== Template options =============================== diff --git a/testing/environments/test.env b/testing/environments/test.env new file mode 100644 index 00000000000..f2668dee2d7 --- /dev/null +++ b/testing/environments/test.env @@ -0,0 +1,5 @@ +BEAT_STRICT_PERMS=false +ES_HOST=elasticsearch +ES_PORT=9200 +ES_USER=beats +ES_PASS=testing From 8f75c8b0d76fc30438299111c70f011257185a09 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:52:55 -0500 Subject: [PATCH 02/18] Add magefile to deploy/kubernetes --- deploy/kubernetes/.travis/setup.sh | 5 +- deploy/kubernetes/Makefile | 23 ---- deploy/kubernetes/auditbeat-kubernetes.yaml | 3 +- .../auditbeat/auditbeat-daemonset.yaml | 1 + .../auditbeat/auditbeat-role-binding.yaml | 1 + .../kubernetes/auditbeat/auditbeat-role.yaml | 1 + .../auditbeat/auditbeat-service-account.yaml | 1 + deploy/kubernetes/filebeat-kubernetes.yaml | 3 +- .../filebeat/filebeat-daemonset.yaml | 1 + .../filebeat/filebeat-role-binding.yaml | 1 + deploy/kubernetes/filebeat/filebeat-role.yaml | 1 + .../filebeat/filebeat-service-account.yaml | 1 + deploy/kubernetes/magefile.go | 101 ++++++++++++++++++ deploy/kubernetes/metricbeat-kubernetes.yaml | 5 +- .../metricbeat/metricbeat-daemonset.yaml | 1 + .../metricbeat-deployment-configmap.yaml | 1 + .../metricbeat/metricbeat-deployment.yaml | 1 + .../metricbeat/metricbeat-role-binding.yaml | 1 + .../metricbeat/metricbeat-role.yaml | 1 + .../metricbeat-service-account.yaml | 1 + 20 files changed, 122 insertions(+), 32 deletions(-) delete mode 100644 deploy/kubernetes/Makefile create mode 100644 deploy/kubernetes/magefile.go diff --git a/deploy/kubernetes/.travis/setup.sh b/deploy/kubernetes/.travis/setup.sh index 41ba277cd9f..c7bde15c87e 100755 --- a/deploy/kubernetes/.travis/setup.sh +++ b/deploy/kubernetes/.travis/setup.sh @@ -1,5 +1,6 @@ -# This script assumes Docker is already installed -#!/bin/bash +#!/usr/bin/env bash + +# This script assumes Docker is already installed. 
set -x set -e diff --git a/deploy/kubernetes/Makefile b/deploy/kubernetes/Makefile deleted file mode 100644 index 722cac158d1..00000000000 --- a/deploy/kubernetes/Makefile +++ /dev/null @@ -1,23 +0,0 @@ -ALL=filebeat metricbeat auditbeat -BEAT_VERSION=$(shell head -n 1 ../../libbeat/docs/version.asciidoc | cut -c 17- ) - -.PHONY: all $(ALL) - -all: $(ALL) - -test: all - for FILE in $(shell ls *-kubernetes.yaml); do \ - BEAT=$$(echo $$FILE | cut -d \- -f 1); \ - kubectl create -f $$FILE; \ - done - -clean: - @for f in $(ALL); do rm -f "$$f-kubernetes.yaml"; done - -$(ALL): - @echo "Generating $@-kubernetes.yaml" - @rm -f $@-kubernetes.yaml - @for f in $(shell ls $@/*.yaml); do \ - sed "s/%VERSION%/${BEAT_VERSION}/g" $$f >> $@-kubernetes.yaml; \ - echo --- >> $@-kubernetes.yaml; \ - done diff --git a/deploy/kubernetes/auditbeat-kubernetes.yaml b/deploy/kubernetes/auditbeat-kubernetes.yaml index 9589a323260..3370056a157 100644 --- a/deploy/kubernetes/auditbeat-kubernetes.yaml +++ b/deploy/kubernetes/auditbeat-kubernetes.yaml @@ -71,7 +71,7 @@ spec: dnsPolicy: ClusterFirstWithHostNet containers: - name: auditbeat - image: docker.elastic.co/beats/auditbeat:7.0.0-alpha2 + image: docker.elastic.co/beats/auditbeat:7.0.0 args: [ "-c", "/etc/auditbeat.yml" ] @@ -182,4 +182,3 @@ metadata: namespace: kube-system labels: k8s-app: auditbeat ---- diff --git a/deploy/kubernetes/auditbeat/auditbeat-daemonset.yaml b/deploy/kubernetes/auditbeat/auditbeat-daemonset.yaml index 2a3f19aac69..3040bb58af7 100644 --- a/deploy/kubernetes/auditbeat/auditbeat-daemonset.yaml +++ b/deploy/kubernetes/auditbeat/auditbeat-daemonset.yaml @@ -1,3 +1,4 @@ +--- # Deploy a auditbeat instance per node for node metrics retrieval apiVersion: extensions/v1beta1 kind: DaemonSet diff --git a/deploy/kubernetes/auditbeat/auditbeat-role-binding.yaml b/deploy/kubernetes/auditbeat/auditbeat-role-binding.yaml index dec98a5f1e5..573639b636f 100644 --- a/deploy/kubernetes/auditbeat/auditbeat-role-binding.yaml +++ b/deploy/kubernetes/auditbeat/auditbeat-role-binding.yaml @@ -1,3 +1,4 @@ +--- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRoleBinding metadata: diff --git a/deploy/kubernetes/auditbeat/auditbeat-role.yaml b/deploy/kubernetes/auditbeat/auditbeat-role.yaml index ae6d32f4149..bcdcbf36027 100644 --- a/deploy/kubernetes/auditbeat/auditbeat-role.yaml +++ b/deploy/kubernetes/auditbeat/auditbeat-role.yaml @@ -1,3 +1,4 @@ +--- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRole metadata: diff --git a/deploy/kubernetes/auditbeat/auditbeat-service-account.yaml b/deploy/kubernetes/auditbeat/auditbeat-service-account.yaml index 641f4ddd1eb..c73bc05080e 100644 --- a/deploy/kubernetes/auditbeat/auditbeat-service-account.yaml +++ b/deploy/kubernetes/auditbeat/auditbeat-service-account.yaml @@ -1,3 +1,4 @@ +--- apiVersion: v1 kind: ServiceAccount metadata: diff --git a/deploy/kubernetes/filebeat-kubernetes.yaml b/deploy/kubernetes/filebeat-kubernetes.yaml index d15197a8ac6..1df4cbd1be0 100644 --- a/deploy/kubernetes/filebeat-kubernetes.yaml +++ b/deploy/kubernetes/filebeat-kubernetes.yaml @@ -69,7 +69,7 @@ spec: terminationGracePeriodSeconds: 30 containers: - name: filebeat - image: docker.elastic.co/beats/filebeat:7.0.0-alpha2 + image: docker.elastic.co/beats/filebeat:7.0.0 args: [ "-c", "/etc/filebeat.yml", "-e", @@ -164,4 +164,3 @@ metadata: namespace: kube-system labels: k8s-app: filebeat ---- diff --git a/deploy/kubernetes/filebeat/filebeat-daemonset.yaml b/deploy/kubernetes/filebeat/filebeat-daemonset.yaml index 
f554dee185b..f4fb4d24612 100644 --- a/deploy/kubernetes/filebeat/filebeat-daemonset.yaml +++ b/deploy/kubernetes/filebeat/filebeat-daemonset.yaml @@ -1,3 +1,4 @@ +--- apiVersion: extensions/v1beta1 kind: DaemonSet metadata: diff --git a/deploy/kubernetes/filebeat/filebeat-role-binding.yaml b/deploy/kubernetes/filebeat/filebeat-role-binding.yaml index f24259ffa6a..fe45c8bc658 100644 --- a/deploy/kubernetes/filebeat/filebeat-role-binding.yaml +++ b/deploy/kubernetes/filebeat/filebeat-role-binding.yaml @@ -1,3 +1,4 @@ +--- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRoleBinding metadata: diff --git a/deploy/kubernetes/filebeat/filebeat-role.yaml b/deploy/kubernetes/filebeat/filebeat-role.yaml index 160bb9044fa..e934b61d56a 100644 --- a/deploy/kubernetes/filebeat/filebeat-role.yaml +++ b/deploy/kubernetes/filebeat/filebeat-role.yaml @@ -1,3 +1,4 @@ +--- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRole metadata: diff --git a/deploy/kubernetes/filebeat/filebeat-service-account.yaml b/deploy/kubernetes/filebeat/filebeat-service-account.yaml index e243881a1d6..af3101451cb 100644 --- a/deploy/kubernetes/filebeat/filebeat-service-account.yaml +++ b/deploy/kubernetes/filebeat/filebeat-service-account.yaml @@ -1,3 +1,4 @@ +--- apiVersion: v1 kind: ServiceAccount metadata: diff --git a/deploy/kubernetes/magefile.go b/deploy/kubernetes/magefile.go new file mode 100644 index 00000000000..437d96d2a3b --- /dev/null +++ b/deploy/kubernetes/magefile.go @@ -0,0 +1,101 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// +build mage + +package main + +import ( + "fmt" + "path/filepath" + "regexp" + + "github.com/magefile/mage/mg" + "github.com/magefile/mage/sh" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" +) + +var ( + beats = []string{ + "auditbeat", + "filebeat", + "metricbeat", + } + + kubectlCmd = sh.RunCmd("kubectl") +) + +// Clean deletes the generated beat-kubernetes.yaml files. +func Clean() error { + return mage.Clean([]string{ + "*beat-kubernetes.yaml", + }) +} + +// IntegTest tests the kubernetes config by deploying it. kubectl is required. +func IntegTest() error { + if err := haveKubernetes(); err != nil { + fmt.Println(">> integTest: kubernetes testing (SKIPPED - kubernetes unavailable)") + return nil + } + + for _, beat := range beats { + manifest := beat + "-kubernetes.yaml" + if err := sh.RunV("kubectl", "create", "-f", manifest); err != nil { + return errors.Wrapf(err, "failed deploying %v to kubernetes", manifest) + } + } + return nil +} + +// Update generates the kubernetes config files. 
+func Update() error { + mg.Deps(Clean) + + version, err := mage.BeatQualifiedVersion() + if err != nil { + return err + } + + for _, beat := range beats { + in := filepath.Join(beat, beat+"-*.yaml") + out := beat + "-kubernetes.yaml" + + inputs, err := mage.FindFiles(in) + if err != nil { + return err + } + + if err = mage.FileConcat(out, 0644, inputs...); err != nil { + return err + } + + if err = mage.FindReplace(out, regexp.MustCompile(`%VERSION%`), version); err != nil { + return err + } + } + return nil +} + +// haveKubernetes returns an error if the 'kubectl version' command returns a +// non-zero exit code. +func haveKubernetes() error { + err := kubectlCmd("version") + return errors.Wrap(err, "kubernetes is not available") +} diff --git a/deploy/kubernetes/metricbeat-kubernetes.yaml b/deploy/kubernetes/metricbeat-kubernetes.yaml index 957a412be9a..496500c1500 100644 --- a/deploy/kubernetes/metricbeat-kubernetes.yaml +++ b/deploy/kubernetes/metricbeat-kubernetes.yaml @@ -104,7 +104,7 @@ spec: dnsPolicy: ClusterFirstWithHostNet containers: - name: metricbeat - image: docker.elastic.co/beats/metricbeat:7.0.0-alpha2 + image: docker.elastic.co/beats/metricbeat:7.0.0 args: [ "-c", "/etc/metricbeat.yml", "-e", @@ -242,7 +242,7 @@ spec: dnsPolicy: ClusterFirstWithHostNet containers: - name: metricbeat - image: docker.elastic.co/beats/metricbeat:7.0.0-alpha2 + image: docker.elastic.co/beats/metricbeat:7.0.0 args: [ "-c", "/etc/metricbeat.yml", "-e", @@ -340,4 +340,3 @@ metadata: namespace: kube-system labels: k8s-app: metricbeat ---- diff --git a/deploy/kubernetes/metricbeat/metricbeat-daemonset.yaml b/deploy/kubernetes/metricbeat/metricbeat-daemonset.yaml index 6335a73f9e5..96cba1d7a88 100644 --- a/deploy/kubernetes/metricbeat/metricbeat-daemonset.yaml +++ b/deploy/kubernetes/metricbeat/metricbeat-daemonset.yaml @@ -1,3 +1,4 @@ +--- # Deploy a Metricbeat instance per node for node metrics retrieval apiVersion: extensions/v1beta1 kind: DaemonSet diff --git a/deploy/kubernetes/metricbeat/metricbeat-deployment-configmap.yaml b/deploy/kubernetes/metricbeat/metricbeat-deployment-configmap.yaml index 62cbd79f560..d86a1d5ba4b 100644 --- a/deploy/kubernetes/metricbeat/metricbeat-deployment-configmap.yaml +++ b/deploy/kubernetes/metricbeat/metricbeat-deployment-configmap.yaml @@ -1,3 +1,4 @@ +--- apiVersion: v1 kind: ConfigMap metadata: diff --git a/deploy/kubernetes/metricbeat/metricbeat-deployment.yaml b/deploy/kubernetes/metricbeat/metricbeat-deployment.yaml index fec95a83c6c..9aedf07f299 100644 --- a/deploy/kubernetes/metricbeat/metricbeat-deployment.yaml +++ b/deploy/kubernetes/metricbeat/metricbeat-deployment.yaml @@ -1,3 +1,4 @@ +--- # Deploy singleton instance in the whole cluster for some unique data sources, like kube-state-metrics apiVersion: apps/v1beta1 kind: Deployment diff --git a/deploy/kubernetes/metricbeat/metricbeat-role-binding.yaml b/deploy/kubernetes/metricbeat/metricbeat-role-binding.yaml index 8a74c8f81a1..6f9e0cbf7da 100644 --- a/deploy/kubernetes/metricbeat/metricbeat-role-binding.yaml +++ b/deploy/kubernetes/metricbeat/metricbeat-role-binding.yaml @@ -1,3 +1,4 @@ +--- apiVersion: rbac.authorization.k8s.io/v1beta1 kind: ClusterRoleBinding metadata: diff --git a/deploy/kubernetes/metricbeat/metricbeat-role.yaml b/deploy/kubernetes/metricbeat/metricbeat-role.yaml index b4533e7bea6..1c83c408f48 100644 --- a/deploy/kubernetes/metricbeat/metricbeat-role.yaml +++ b/deploy/kubernetes/metricbeat/metricbeat-role.yaml @@ -1,3 +1,4 @@ +--- apiVersion: 
rbac.authorization.k8s.io/v1beta1 kind: ClusterRole metadata: diff --git a/deploy/kubernetes/metricbeat/metricbeat-service-account.yaml b/deploy/kubernetes/metricbeat/metricbeat-service-account.yaml index 2b2fd46dd31..f96df65189d 100644 --- a/deploy/kubernetes/metricbeat/metricbeat-service-account.yaml +++ b/deploy/kubernetes/metricbeat/metricbeat-service-account.yaml @@ -1,3 +1,4 @@ +--- apiVersion: v1 kind: ServiceAccount metadata: From 9b0c54ae6a95f1a5f9abcc01ffb17c4fc55f9162 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:52:22 -0500 Subject: [PATCH 03/18] Refactor Auditbeat build logic --- auditbeat/Makefile | 17 +- auditbeat/docker-compose.yml | 6 +- auditbeat/include/fields.go | 2 +- auditbeat/magefile.go | 198 ++---------------- auditbeat/scripts/mage/config.go | 34 ++- auditbeat/scripts/mage/fields.go | 86 ++++++++ auditbeat/scripts/mage/package.go | 59 ++++-- auditbeat/scripts/mage/{docs.go => update.go} | 85 ++++++-- x-pack/auditbeat/.gitignore | 4 - x-pack/auditbeat/Makefile | 7 +- x-pack/auditbeat/magefile.go | 179 ++-------------- x-pack/auditbeat/make.bat | 11 + 12 files changed, 276 insertions(+), 412 deletions(-) create mode 100644 auditbeat/scripts/mage/fields.go rename auditbeat/scripts/mage/{docs.go => update.go} (56%) delete mode 100644 x-pack/auditbeat/.gitignore create mode 100644 x-pack/auditbeat/make.bat diff --git a/auditbeat/Makefile b/auditbeat/Makefile index 33d426bea62..0326f3e977d 100644 --- a/auditbeat/Makefile +++ b/auditbeat/Makefile @@ -1,13 +1,4 @@ -BEAT_NAME=auditbeat -BEAT_TITLE=Auditbeat -SYSTEM_TESTS=true -TEST_ENVIRONMENT?=true -GOX_OS?=linux windows -ES_BEATS?=.. -EXCLUDE_COMMON_UPDATE_TARGET=true - -include ${ES_BEATS}/libbeat/scripts/Makefile - -.PHONY: update -update: mage - mage update +# +# Includes +# +include ../dev-tools/make/oss.mk diff --git a/auditbeat/docker-compose.yml b/auditbeat/docker-compose.yml index f9fca698825..ed183a23b23 100644 --- a/auditbeat/docker-compose.yml +++ b/auditbeat/docker-compose.yml @@ -5,11 +5,9 @@ services: depends_on: - proxy_dep working_dir: /go/src/github.com/elastic/beats/auditbeat + env_file: + - ${PWD}/../testing/environments/test.env environment: - - ES_HOST=elasticsearch - - ES_PORT=9200 - - ES_USER=beats - - ES_PASS=testing - KIBANA_HOST=kibana - KIBANA_PORT=5601 volumes: diff --git a/auditbeat/include/fields.go b/auditbeat/include/fields.go index 85d5ffb7343..8686a85d9e0 100644 --- a/auditbeat/include/fields.go +++ b/auditbeat/include/fields.go @@ -24,7 +24,7 @@ import ( ) func init() { - if err := asset.SetFields("auditbeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("auditbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } diff --git a/auditbeat/magefile.go b/auditbeat/magefile.go index 0eb532f1e72..d000b8fcd17 100644 --- a/auditbeat/magefile.go +++ b/auditbeat/magefile.go @@ -20,186 +20,34 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" - auditbeat "github.com/elastic/beats/auditbeat/scripts/mage" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ 
"github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + auditbeat "github.com/elastic/beats/auditbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Audit the activities of users and processes on your system." -} - -// Aliases provides compatibility with CI while we transition all Beats -// to having common testing targets. -var Aliases = map[string]interface{}{ - "goTestUnit": GoUnitTest, // dev-tools/jenkins_ci.ps1 uses this. -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatOSSPackaging() - mage.PackageKibanaDashboardsFromBuildDir() - auditbeat.CustomizePackaging(auditbeat.OSSPackaging) - - mg.SerialDeps(Fields, Dashboards, Config, mage.GenerateModuleIncludeListGo) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update is an alias for running fields, dashboards, config, includes. -func Update() { - mg.SerialDeps(Fields, Dashboards, Config, - mage.GenerateModuleIncludeListGo, Docs) -} - -// Config generates both the short/reference configs and populates the modules.d -// directory. -func Config() error { - return mage.Config(mage.AllConfigTypes, auditbeat.OSSConfigFileParams(), ".") -} - -// Fields generates fields.yml and fields.go files for the Beat. -func Fields() { - mg.Deps(libbeatAndAuditbeatCommonFieldsGo, moduleFieldsGo) - mg.Deps(fieldsYML) -} - -// libbeatAndAuditbeatCommonFieldsGo generates a fields.go containing both -// libbeat and auditbeat's common fields. -func libbeatAndAuditbeatCommonFieldsGo() error { - if err := mage.GenerateFieldsYAML(); err != nil { - return err - } - return mage.GenerateAllInOneFieldsGo() -} - -// moduleFieldsGo generates a fields.go for each module. -func moduleFieldsGo() error { - return mage.GenerateModuleFieldsGo("module") -} - -// fieldsYML generates the fields.yml file containing all fields. -func fieldsYML() error { - return mage.GenerateFieldsYAML("module") + auditbeat.SelectLogic = mage.OSSProject } -// ExportDashboard exports a dashboard and writes it into the correct directory. 
-// -// Required environment variables: -// - MODULE: Name of the module -// - ID: Dashboard id -func ExportDashboard() error { - return mage.ExportDashboard() -} - -// Dashboards collects all the dashboards and generates index patterns. -func Dashboards() error { - return mage.KibanaDashboards("module") -} - -// Docs collects the documentation. -func Docs() { - mg.Deps(auditbeat.ModuleDocs, auditbeat.FieldDocs) -} - -// Fmt formats source code and adds file headers. -func Fmt() { - mg.Deps(mage.Format) -} - -// Check runs fmt and update then returns an error if any modifications are found. -func Check() { - mg.SerialDeps(mage.Format, Update, mage.Check) -} - -// IntegTest executes integration tests (it uses Docker to run the tests). -func IntegTest() { - mage.AddIntegTestUsage() - defer mage.StopIntegTestEnv() - mg.SerialDeps(GoIntegTest, PythonIntegTest) -} - -// UnitTest executes the unit tests. -func UnitTest() { - mg.SerialDeps(GoUnitTest, PythonUnitTest) -} - -// GoUnitTest executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoUnitTest(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoIntegTest executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoIntegTest(ctx context.Context) error { - return mage.RunIntegTest("goIntegTest", func() error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) - }) -} - -// PythonUnitTest executes the python system tests. -func PythonUnitTest() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) -} - -// PythonIntegTest executes the python system tests in the integration environment (Docker). -func PythonIntegTest(ctx context.Context) error { - if !mage.IsInIntegTestEnv() { - mg.SerialDeps(Fields, Dashboards) - } - return mage.RunIntegTest("pythonIntegTest", func() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestIntegrationArgs()) - }) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(auditbeat.Update.All) } diff --git a/auditbeat/scripts/mage/config.go b/auditbeat/scripts/mage/config.go index 85579440e53..46701636280 100644 --- a/auditbeat/scripts/mage/config.go +++ b/auditbeat/scripts/mage/config.go @@ -18,8 +18,6 @@ package mage import ( - "path/filepath" - "github.com/pkg/errors" "github.com/elastic/beats/dev-tools/mage" @@ -30,28 +28,24 @@ const ( configTemplateGlob = "module/*/_meta/config*.yml.tmpl" ) -// OSSConfigFileParams returns the parameters for generating OSS config. -func OSSConfigFileParams() mage.ConfigFileParams { - params, err := configFileParams(mage.OSSBeatDir()) - if err != nil { - panic(err) - } - return params -} - -// XPackConfigFileParams returns the parameters for generating X-Pack config. -func XPackConfigFileParams() mage.ConfigFileParams { - params, err := configFileParams(mage.OSSBeatDir(), mage.XPackBeatDir()) +// config generates short/reference/docker configs and populates the modules.d +// directory. 
+func config() error { + args, err := configFileParams() if err != nil { - panic(err) + return err } - return params + return mage.Config(mage.AllConfigTypes, args, ".") } -func configFileParams(dirs ...string) (mage.ConfigFileParams, error) { - var globs []string - for _, dir := range dirs { - globs = append(globs, filepath.Join(dir, configTemplateGlob)) +func configFileParams() (mage.ConfigFileParams, error) { + globs := []string{mage.OSSBeatDir(configTemplateGlob)} + switch SelectLogic { + case mage.OSSProject: + case mage.XPackProject: + globs = append(globs, mage.XPackBeatDir(configTemplateGlob)) + default: + panic(errors.Errorf("invalid SelectLogic value")) } configFiles, err := mage.FindFiles(globs...) diff --git a/auditbeat/scripts/mage/fields.go b/auditbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..c03aa21d87d --- /dev/null +++ b/auditbeat/scripts/mage/fields.go @@ -0,0 +1,86 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + "go.uber.org/multierr" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch SelectLogic { + case mage.OSSProject: + return multierr.Combine( + b.commonFieldsGo(), + b.moduleFieldsGo(), + ) + case mage.XPackProject: + return b.moduleFieldsGo() + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject: + modules = append(modules, mage.OSSBeatDir("module")) + case mage.XPackProject: + modules = append(modules, mage.OSSBeatDir("module"), mage.XPackBeatDir("module")) + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML, + mage.OSSBeatDir("module"), mage.XPackBeatDir("module")) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} + +func (b fieldsBuilder) moduleFieldsGo() error { + return mage.GenerateModuleFieldsGo("module") +} diff --git a/auditbeat/scripts/mage/package.go b/auditbeat/scripts/mage/package.go index a06768f451f..7351cc24f7e 100644 --- a/auditbeat/scripts/mage/package.go +++ b/auditbeat/scripts/mage/package.go @@ -18,32 +18,56 @@ 
package mage import ( + "fmt" + "time" + + "github.com/magefile/mage/mg" "github.com/pkg/errors" "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" ) -// PackagingFlavor specifies the type of packaging (OSS vs X-Pack). -type PackagingFlavor uint8 +func init() { + mage.BeatDescription = "Audit the activities of users and processes on your system." +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() -// Packaging flavors. -const ( - OSSPackaging PackagingFlavor = iota - XPackPackaging -) + switch SelectLogic { + case mage.OSSProject: + mage.UseElasticBeatOSSPackaging() + case mage.XPackProject: + mage.UseElasticBeatXPackPackaging() + } + mage.PackageKibanaDashboardsFromBuildDir() + customizePackaging() + + mg.SerialDeps(Update.All) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} -// CustomizePackaging modifies the package specs to use templated config files +// customizePackaging modifies the package specs to use templated config files // instead of the defaults. // // Customizations specific to Auditbeat: // - Include audit.rules.d directory in packages. -func CustomizePackaging(pkgFlavor PackagingFlavor) { +// - Generate OS specific config files. +func customizePackaging() { var ( shortConfig = mage.PackageFile{ Mode: 0600, Source: "{{.PackageDir}}/auditbeat.yml", Dep: func(spec mage.PackageSpec) error { - return generateConfig(pkgFlavor, mage.ShortConfigType, spec) + return generateConfig(mage.ShortConfigType, spec) }, Config: true, } @@ -51,7 +75,7 @@ func CustomizePackaging(pkgFlavor PackagingFlavor) { Mode: 0644, Source: "{{.PackageDir}}/auditbeat.reference.yml", Dep: func(spec mage.PackageSpec) error { - return generateConfig(pkgFlavor, mage.ReferenceConfigType, spec) + return generateConfig(mage.ReferenceConfigType, spec) }, } ) @@ -108,15 +132,10 @@ func CustomizePackaging(pkgFlavor PackagingFlavor) { } } -func generateConfig(pkgFlavor PackagingFlavor, ct mage.ConfigFileType, spec mage.PackageSpec) error { - var args mage.ConfigFileParams - switch pkgFlavor { - case OSSPackaging: - args = OSSConfigFileParams() - case XPackPackaging: - args = XPackConfigFileParams() - default: - panic(errors.Errorf("Invalid packaging flavor (either oss or xpack): %v", pkgFlavor)) +func generateConfig(ct mage.ConfigFileType, spec mage.PackageSpec) error { + args, err := configFileParams() + if err != nil { + return err } // PackageDir isn't exported but we can grab it's value this way. 
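A short orientation note, since patch 03 is the first to switch a Beat over to the shared-target pattern used for the rest of the series: a "// mage:import" comment on an import asks Mage to re-export that package's exported targets as targets of the importing magefile, a type whose underlying type is mg.Namespace groups its methods under a "namespace:target" command-line name, and mg.Deps / mg.SerialDeps run each dependency at most once per invocation, concurrently or strictly in order. The sketch below is illustrative only and is not part of the patches; the Gen namespace and the build/package stand-ins are invented for the example, while the calls into github.com/magefile/mage/mg are real API.

// +build mage

// Illustrative sketch only. The Gen namespace and the build/package
// stand-ins are invented for this example; only the calls into
// github.com/magefile/mage/mg are real API.
package main

import (
	"fmt"

	"github.com/magefile/mage/mg"
)

// Gen groups generated-content targets; Mage exposes its methods on the
// command line as "gen:fields", "gen:config", and "gen:all".
type Gen mg.Namespace

// Fields stands in for a fields-file generator.
func (Gen) Fields() error { fmt.Println("gen:fields"); return nil }

// Config stands in for a config-file generator.
func (Gen) Config() error { fmt.Println("gen:config"); return nil }

// All runs every generation step. mg.Deps runs each dependency at most
// once per mage invocation and may run them concurrently.
func (Gen) All() { mg.Deps(Gen.Fields, Gen.Config) }

// Update mirrors the top-level aliases added in this series: a plain
// target that only depends on the namespaced one (the workaround for
// magefile/mage issue 217 referenced in the magefiles above).
func Update() { mg.Deps(Gen.All) }

// Package mirrors the ordering idiom of the refactored Package targets:
// regenerate inputs, then build (possibly in parallel), then package.
// Each mg.SerialDeps/mg.Deps call blocks until its dependencies finish.
func Package() {
	mg.SerialDeps(Gen.All)
	mg.Deps(buildA, buildB)
	mg.SerialDeps(doPackage)
}

func buildA() error { fmt.Println("build A"); return nil }
func buildB() error { fmt.Println("build B"); return nil }
func doPackage() error { fmt.Println("package"); return nil }

With a magefile shaped like this, "mage -l" lists gen:all, gen:config, gen:fields, package, and update, and "mage package" runs the three stages in the order shown, which is the same shape the refactored auditbeat and filebeat Package targets follow.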
diff --git a/auditbeat/scripts/mage/docs.go b/auditbeat/scripts/mage/update.go similarity index 56% rename from auditbeat/scripts/mage/docs.go rename to auditbeat/scripts/mage/update.go index 1b0380e407b..c764d69d310 100644 --- a/auditbeat/scripts/mage/docs.go +++ b/auditbeat/scripts/mage/update.go @@ -22,14 +22,83 @@ import ( "path/filepath" "strings" + "github.com/magefile/mage/mg" "github.com/magefile/mage/sh" "github.com/pkg/errors" "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + "github.com/elastic/beats/dev-tools/mage/target/integtest" + "github.com/elastic/beats/dev-tools/mage/target/unittest" ) +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs, Update.ModuleDocs) + + unittest.RegisterGoTestDeps(Update.Fields) + unittest.RegisterPythonTestDeps(Update.Fields) + + integtest.RegisterPythonTestDeps(Update.Fields, Update.Dashboards) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields, Update.Dashboards, Update.Config, + Update.Includes, Update.ModuleDocs, Update.FieldDocs) +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// Includes updates include/list.go. +func (Update) Includes() error { + return mage.GenerateModuleIncludeListGo() +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. +func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + switch SelectLogic { + case mage.OSSProject: + return mage.KibanaDashboards("module") + case mage.XPackProject: + return mage.KibanaDashboards(mage.OSSBeatDir("module"), "module") + default: + panic(mage.ErrUnknownProjectType) + } +} + +// FieldDocs generates docs/fields.asciidoc containing all fields (including +// x-pack). +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} + // ModuleDocs collects documentation from modules (both OSS and X-Pack). -func ModuleDocs() error { +func (Update) ModuleDocs() error { dirsWithModules := []string{ mage.OSSBeatDir(), mage.XPackBeatDir(), @@ -87,17 +156,3 @@ func ModuleDocs() error { return sh.Run(python, args...) } - -// FieldDocs generates docs/fields.asciidoc containing all fields -// (including x-pack). 
-func FieldDocs() error { - inputs := []string{ - mage.OSSBeatDir("module"), - mage.XPackBeatDir("module"), - } - output := mage.CreateDir("build/fields/fields.all.yml") - if err := mage.GenerateFieldsYAMLTo(output, inputs...); err != nil { - return err - } - return mage.Docs.FieldDocs(output) -} diff --git a/x-pack/auditbeat/.gitignore b/x-pack/auditbeat/.gitignore deleted file mode 100644 index 1a331320986..00000000000 --- a/x-pack/auditbeat/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -/auditbeat -/auditbeat.test -/data -/fields.yml diff --git a/x-pack/auditbeat/Makefile b/x-pack/auditbeat/Makefile index 56633e2b3e5..7427a5c672b 100644 --- a/x-pack/auditbeat/Makefile +++ b/x-pack/auditbeat/Makefile @@ -1,3 +1,4 @@ -ES_BEATS ?= ../.. - -include $(ES_BEATS)/dev-tools/make/xpack.mk +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/auditbeat/magefile.go b/x-pack/auditbeat/magefile.go index b2c8ac9e8b7..93199662e50 100644 --- a/x-pack/auditbeat/magefile.go +++ b/x-pack/auditbeat/magefile.go @@ -7,169 +7,34 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" - auditbeat "github.com/elastic/beats/auditbeat/scripts/mage" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + auditbeat "github.com/elastic/beats/auditbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Audit the activities of users and processes on your system." - mage.BeatLicense = "Elastic License" -} - -// Aliases provides compatibility with CI while we transition all Beats -// to having common testing targets. -var Aliases = map[string]interface{}{ - "goTestUnit": GoUnitTest, // dev-tools/jenkins_ci.ps1 uses this. -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. 
-func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatXPackPackaging() - mage.PackageKibanaDashboardsFromBuildDir() - auditbeat.CustomizePackaging(auditbeat.XPackPackaging) - - mg.SerialDeps(Fields, Dashboards, Config, mage.GenerateModuleIncludeListGo) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update is an alias for running fields, dashboards, config. -func Update() { - mg.SerialDeps(Fields, Dashboards, Config, mage.GenerateModuleIncludeListGo) -} - -// Config generates both the short and reference configs. -func Config() error { - return mage.Config(mage.AllConfigTypes, auditbeat.XPackConfigFileParams(), ".") -} - -// Fields generates a fields.yml and include/fields.go. -func Fields() { - mg.SerialDeps(fieldsYML, moduleFieldsGo) -} - -func moduleFieldsGo() error { - return mage.GenerateModuleFieldsGo("module") -} + auditbeat.SelectLogic = mage.XPackProject -// fieldsYML generates the fields.yml file containing all fields. -func fieldsYML() error { - return mage.GenerateFieldsYAML(mage.OSSBeatDir("module"), "module") -} - -// ExportDashboard exports a dashboard and writes it into the correct directory. -// -// Required environment variables: -// - MODULE: Name of the module -// - ID: Dashboard id -func ExportDashboard() error { - return mage.ExportDashboard() -} - -// Dashboards collects all the dashboards and generates index patterns. -func Dashboards() error { - return mage.KibanaDashboards(mage.OSSBeatDir("module"), "module") -} - -// Fmt formats source code and adds file headers. -func Fmt() { - mg.Deps(mage.Format) -} - -// Check runs fmt and update then returns an error if any modifications are found. -func Check() { - mg.SerialDeps(mage.Format, Update, mage.Check) -} - -// IntegTest executes integration tests (it uses Docker to run the tests). -func IntegTest() { - mage.AddIntegTestUsage() - defer mage.StopIntegTestEnv() - mg.SerialDeps(GoIntegTest, PythonIntegTest) -} - -// UnitTest executes the unit tests. -func UnitTest() { - mg.SerialDeps(GoUnitTest, PythonUnitTest) -} - -// GoUnitTest executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoUnitTest(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoIntegTest executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoIntegTest(ctx context.Context) error { - return mage.RunIntegTest("goIntegTest", func() error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) - }) -} - -// PythonUnitTest executes the python system tests. -func PythonUnitTest() error { - mg.SerialDeps(Fields, mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) + mage.BeatLicense = "Elastic License" } -// PythonIntegTest executes the python system tests in the integration environment (Docker). 
-func PythonIntegTest(ctx context.Context) error { - if !mage.IsInIntegTestEnv() { - mg.Deps(Fields) - } - return mage.RunIntegTest("pythonIntegTest", func() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestIntegrationArgs()) - }) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(auditbeat.Update.All) } diff --git a/x-pack/auditbeat/make.bat b/x-pack/auditbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/x-pack/auditbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. + +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* From a579379e482ae0a45d6a22f40d959f1b5e1ea282 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:53:38 -0500 Subject: [PATCH 04/18] Refactor filebeat build logic --- filebeat/Makefile | 22 +-- filebeat/docker-compose.yml | 6 +- filebeat/include/fields.go | 2 +- filebeat/inputsource/tcp/server_test.go | 2 +- filebeat/magefile.go | 217 +++--------------------- filebeat/scripts/mage/config.go | 48 +++--- filebeat/scripts/mage/docs.go | 43 ----- filebeat/scripts/mage/fields.go | 101 +++++++++++ filebeat/scripts/mage/package.go | 77 ++++++--- filebeat/scripts/mage/test.go | 58 +++++++ filebeat/scripts/mage/update.go | 144 ++++++++++++++++ filebeat/tests/system/test_ml.py | 2 +- filebeat/tests/system/test_pipeline.py | 2 +- x-pack/filebeat/Makefile | 7 +- x-pack/filebeat/docker-compose.yml | 6 +- x-pack/filebeat/magefile.go | 191 +++------------------ 16 files changed, 445 insertions(+), 483 deletions(-) delete mode 100644 filebeat/scripts/mage/docs.go create mode 100644 filebeat/scripts/mage/fields.go create mode 100644 filebeat/scripts/mage/test.go create mode 100644 filebeat/scripts/mage/update.go diff --git a/filebeat/Makefile b/filebeat/Makefile index 6e37e664325..493e3a99e09 100644 --- a/filebeat/Makefile +++ b/filebeat/Makefile @@ -1,16 +1,16 @@ -BEAT_NAME?=filebeat -BEAT_TITLE?=Filebeat -SYSTEM_TESTS?=true -TEST_ENVIRONMENT?=true -GOX_FLAGS=-arch="amd64 386 arm ppc64 ppc64le" -ES_BEATS?=.. -EXCLUDE_COMMON_UPDATE_TARGET=true +# +# Variables +# +GOX_FLAGS := -arch="amd64 386 arm ppc64 ppc64le" -include ${ES_BEATS}/libbeat/scripts/Makefile +# +# Includes +# +include ../dev-tools/make/oss.mk -.PHONY: update -update: mage - mage update +# +# Targets +# # Creates a new module. Requires the params MODULE. 
.PHONY: create-module diff --git a/filebeat/docker-compose.yml b/filebeat/docker-compose.yml index 8cd7bf1a06b..63165b41611 100644 --- a/filebeat/docker-compose.yml +++ b/filebeat/docker-compose.yml @@ -5,13 +5,9 @@ services: depends_on: - proxy_dep env_file: + - ${PWD}/../testing/environments/test.env - ${PWD}/input/redis/_meta/env environment: - - BEAT_STRICT_PERMS=false - - ES_HOST=elasticsearch - - ES_PORT=9200 - - ES_USER=beats - - ES_PASS=testing - KIBANA_HOST=kibana - KIBANA_PORT=5601 working_dir: /go/src/github.com/elastic/beats/filebeat diff --git a/filebeat/include/fields.go b/filebeat/include/fields.go index a93ca3d430c..3ea91be19fb 100644 --- a/filebeat/include/fields.go +++ b/filebeat/include/fields.go @@ -24,7 +24,7 @@ import ( ) func init() { - if err := asset.SetFields("filebeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("filebeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } diff --git a/filebeat/inputsource/tcp/server_test.go b/filebeat/inputsource/tcp/server_test.go index c1941578fc9..73829a58de3 100644 --- a/filebeat/inputsource/tcp/server_test.go +++ b/filebeat/inputsource/tcp/server_test.go @@ -197,7 +197,7 @@ func TestReceiveNewEventsConcurrently(t *testing.T) { to := func(message []byte, mt inputsource.NetworkMetadata) { ch <- &info{message: string(message), mt: mt} } - cfg, err := common.NewConfigFrom(map[string]interface{}{"host": ":0"}) + cfg, err := common.NewConfigFrom(map[string]interface{}{"host": "localhost:0"}) if !assert.NoError(t, err) { return } diff --git a/filebeat/magefile.go b/filebeat/magefile.go index 1a2d076360b..8b894cb861e 100644 --- a/filebeat/magefile.go +++ b/filebeat/magefile.go @@ -20,209 +20,32 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import filebeat "github.com/elastic/beats/filebeat/scripts/mage" ) func init() { - mage.BeatDescription = "Filebeat sends log files to Logstash or directly to Elasticsearch." -} - -// Aliases provides compatibility with CI while we transition all Beats -// to having common testing targets. -var Aliases = map[string]interface{}{ - "goTestUnit": GoUnitTest, // dev-tools/jenkins_ci.ps1 uses this. -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. 
-func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatOSSPackaging() - mage.PackageKibanaDashboardsFromBuildDir() - filebeat.CustomizePackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages(mage.WithModules(), mage.WithModulesD()) -} - -// Update is an alias for executing fields, dashboards, config, includes. -func Update() { - mg.SerialDeps(Fields, Dashboards, Config, includeList, fieldDocs, - filebeat.CollectDocs, - filebeat.PrepareModulePackagingOSS) -} - -// Config generates both the short/reference/docker configs and populates the -// modules.d directory. -func Config() { - mg.Deps(mage.GenerateDirModulesD, configYML) -} - -func configYML() error { - return mage.Config(mage.AllConfigTypes, filebeat.OSSConfigFileParams(), ".") -} - -// includeList generates include/list.go with imports for inputs. -func includeList() error { - return mage.GenerateIncludeListGo([]string{"input/*"}, []string{"module"}) + filebeat.SelectLogic = mage.OSSProject } -// Fields generates fields.yml and fields.go files for the Beat. -func Fields() { - mg.Deps(libbeatAndFilebeatCommonFieldsGo, moduleFieldsGo) - mg.Deps(fieldsYML) -} - -// libbeatAndFilebeatCommonFieldsGo generates a fields.go containing both -// libbeat and filebeat's common fields. -func libbeatAndFilebeatCommonFieldsGo() error { - if err := mage.GenerateFieldsYAML(); err != nil { - return err - } - return mage.GenerateAllInOneFieldsGo() -} - -// moduleFieldsGo generates a fields.go for each module. -func moduleFieldsGo() error { - return mage.GenerateModuleFieldsGo("module") -} - -// fieldsYML generates the fields.yml file containing all fields. -func fieldsYML() error { - return mage.GenerateFieldsYAML("module") -} - -// fieldDocs generates docs/fields.asciidoc containing all fields -// (including x-pack). -func fieldDocs() error { - inputs := []string{ - mage.OSSBeatDir("module"), - mage.XPackBeatDir("module"), - mage.OSSBeatDir("input"), - mage.XPackBeatDir("input"), - } - output := mage.CreateDir("build/fields/fields.all.yml") - if err := mage.GenerateFieldsYAMLTo(output, inputs...); err != nil { - return err - } - return mage.Docs.FieldDocs(output) -} - -// Dashboards collects all the dashboards and generates index patterns. -func Dashboards() error { - return mage.KibanaDashboards("module") -} - -// ExportDashboard exports a dashboard and writes it into the correct directory. -// -// Required environment variables: -// - MODULE: Name of the module -// - ID: Dashboard id -func ExportDashboard() error { - return mage.ExportDashboard() -} - -// Fmt formats source code and adds file headers. -func Fmt() { - mg.Deps(mage.Format) -} - -// Check runs fmt and update then returns an error if any modifications are found. -func Check() { - mg.SerialDeps(mage.Format, Update, mage.Check) -} - -// IntegTest executes integration tests (it uses Docker to run the tests). 
-func IntegTest() { - mage.AddIntegTestUsage() - defer mage.StopIntegTestEnv() - mg.SerialDeps(GoIntegTest, PythonIntegTest) -} - -// UnitTest executes the unit tests. -func UnitTest() { - mg.SerialDeps(GoUnitTest, PythonUnitTest) -} - -// GoUnitTest executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoUnitTest(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoIntegTest executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoIntegTest(ctx context.Context) error { - return mage.RunIntegTest("goIntegTest", func() error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) - }) -} - -// PythonUnitTest executes the python system tests. -func PythonUnitTest() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) -} - -// PythonIntegTest executes the python system tests in the integration environment (Docker). -func PythonIntegTest(ctx context.Context) error { - if !mage.IsInIntegTestEnv() { - mg.Deps(Fields) - } - return mage.RunIntegTest("pythonIntegTest", func() error { - mg.Deps(mage.BuildSystemTestBinary) - args := mage.DefaultPythonTestIntegrationArgs() - args.Env["MODULES_PATH"] = mage.CWD("module") - return mage.PythonNoseTest(args) - }) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(filebeat.Update.All) } diff --git a/filebeat/scripts/mage/config.go b/filebeat/scripts/mage/config.go index 6b7cfc6f93a..7a62b1527c9 100644 --- a/filebeat/scripts/mage/config.go +++ b/filebeat/scripts/mage/config.go @@ -21,7 +21,32 @@ import ( "github.com/elastic/beats/dev-tools/mage" ) -const modulesConfigYml = "build/config.modules.yml" +const ( + modulesConfigYml = "build/config.modules.yml" +) + +// config generates short/reference/docker configs and populates the modules.d +// directory. +func config() error { + var args mage.ConfigFileParams + switch SelectLogic { + case mage.OSSProject: + args = configFileParams(mage.OSSBeatDir("module")) + case mage.XPackProject: + args = configFileParams(mage.OSSBeatDir("module"), "module") + args.ReferenceParts = []string{ + mage.OSSBeatDir("_meta/common.reference.p1.yml"), + modulesConfigYml, + mage.OSSBeatDir("_meta/common.reference.inputs.yml"), + "_meta/common.reference.inputs.yml", // Added only to X-Pack. + mage.OSSBeatDir("_meta/common.reference.p2.yml"), + mage.LibbeatDir("_meta/config.reference.yml"), + } + default: + panic(mage.ErrUnknownProjectType) + } + return mage.Config(mage.AllConfigTypes, args, ".") +} func configFileParams(moduleDirs ...string) mage.ConfigFileParams { collectModuleConfig := func() error { @@ -48,24 +73,3 @@ func configFileParams(moduleDirs ...string) mage.ConfigFileParams { }, } } - -// OSSConfigFileParams returns the default ConfigFileParams for generating -// filebeat*.yml files. -func OSSConfigFileParams(moduleDirs ...string) mage.ConfigFileParams { - return configFileParams(mage.OSSBeatDir("module")) -} - -// XPackConfigFileParams returns the default ConfigFileParams for generating -// filebeat*.yml files. 
-func XPackConfigFileParams() mage.ConfigFileParams { - args := configFileParams(mage.OSSBeatDir("module"), "module") - args.ReferenceParts = []string{ - mage.OSSBeatDir("_meta/common.reference.p1.yml"), - modulesConfigYml, - mage.OSSBeatDir("_meta/common.reference.inputs.yml"), - "_meta/common.reference.inputs.yml", // Added only to X-Pack. - mage.OSSBeatDir("_meta/common.reference.p2.yml"), - mage.LibbeatDir("_meta/config.reference.yml"), - } - return args -} diff --git a/filebeat/scripts/mage/docs.go b/filebeat/scripts/mage/docs.go deleted file mode 100644 index bd6bc0d3512..00000000000 --- a/filebeat/scripts/mage/docs.go +++ /dev/null @@ -1,43 +0,0 @@ -// Licensed to Elasticsearch B.V. under one or more contributor -// license agreements. See the NOTICE file distributed with -// this work for additional information regarding copyright -// ownership. Elasticsearch B.V. licenses this file to you under -// the Apache License, Version 2.0 (the "License"); you may -// not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -package mage - -import ( - "github.com/magefile/mage/sh" - - "github.com/elastic/beats/dev-tools/mage" -) - -// CollectDocs executes the Filebeat docs_collector script to collect/generate -// documentation from each module. -func CollectDocs() error { - ve, err := mage.PythonVirtualenv() - if err != nil { - return err - } - - python, err := mage.LookVirtualenvPath(ve, "python") - if err != nil { - return err - } - - // TODO: Port this script to Go. - return sh.Run(python, - mage.OSSBeatDir("scripts/docs_collector.py"), - "--beat", mage.BeatName) -} diff --git a/filebeat/scripts/mage/fields.go b/filebeat/scripts/mage/fields.go new file mode 100644 index 00000000000..6a680a18ff6 --- /dev/null +++ b/filebeat/scripts/mage/fields.go @@ -0,0 +1,101 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + "go.uber.org/multierr" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch SelectLogic { + case mage.OSSProject: + return multierr.Combine( + b.commonFieldsGo(), + b.moduleFieldsGo(), + ) + case mage.XPackProject: + return multierr.Combine( + b.inputFieldsGo(), + b.moduleFieldsGo(), + ) + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject: + modules = append(modules, mage.OSSBeatDir("module")) + case mage.XPackProject: + modules = append(modules, + mage.OSSBeatDir("module"), + mage.XPackBeatDir("module"), + mage.XPackBeatDir("input"), + ) + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML, + mage.OSSBeatDir("module"), + mage.XPackBeatDir("module"), + mage.XPackBeatDir("input"), + ) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} + +func (b fieldsBuilder) moduleFieldsGo() error { + return mage.GenerateModuleFieldsGo("module") +} + +// inputFieldsGo generates a fields.go for each Filebeat input type. +func (b fieldsBuilder) inputFieldsGo() error { + return mage.GenerateModuleFieldsGo("input") +} diff --git a/filebeat/scripts/mage/package.go b/filebeat/scripts/mage/package.go index 5f39e0937e0..c35e9dc6614 100644 --- a/filebeat/scripts/mage/package.go +++ b/filebeat/scripts/mage/package.go @@ -18,9 +18,15 @@ package mage import ( + "fmt" + "time" + "github.com/magefile/mage/mg" "github.com/pkg/errors" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" + "github.com/elastic/beats/dev-tools/mage" ) @@ -29,10 +35,36 @@ const ( dirModulesDGenerated = "build/package/modules.d" ) -// CustomizePackaging modifies the package specs to add the modules and +func init() { + mage.BeatDescription = "Filebeat sends log files to Logstash or directly to Elasticsearch." +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() + + switch SelectLogic { + case mage.OSSProject: + mage.UseElasticBeatOSSPackaging() + case mage.XPackProject: + mage.UseElasticBeatXPackPackaging() + } + mage.PackageKibanaDashboardsFromBuildDir() + customizePackaging() + + mg.Deps(Update.All, prepareModulePackaging) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} + +// customizePackaging modifies the package specs to add the modules and // modules.d directory. You must declare a dependency on either // PrepareModulePackagingOSS or PrepareModulePackagingXPack. 
-func CustomizePackaging() { +func customizePackaging() { var ( moduleTarget = "module" module = mage.PackageFile{ @@ -69,31 +101,32 @@ func CustomizePackaging() { } } -// PrepareModulePackagingOSS generates build/package/modules and -// build/package/modules.d directories for use in packaging. -func PrepareModulePackagingOSS() error { - return prepareModulePackaging([]struct{ Src, Dst string }{ - {mage.OSSBeatDir("module"), dirModuleGenerated}, - {mage.OSSBeatDir("modules.d"), dirModulesDGenerated}, - }...) -} - -// PrepareModulePackagingXPack generates build/package/modules and +// prepareModulePackaging generates build/package/modules and // build/package/modules.d directories for use in packaging. -func PrepareModulePackagingXPack() error { - return prepareModulePackaging([]struct{ Src, Dst string }{ - {mage.OSSBeatDir("module"), dirModuleGenerated}, - {"module", dirModuleGenerated}, - {mage.OSSBeatDir("modules.d"), dirModulesDGenerated}, - {"modules.d", dirModulesDGenerated}, - }...) +func prepareModulePackaging() error { + switch SelectLogic { + case mage.OSSProject: + return _prepareModulePackaging([]struct{ Src, Dst string }{ + {mage.OSSBeatDir("module"), dirModuleGenerated}, + {mage.OSSBeatDir("modules.d"), dirModulesDGenerated}, + }...) + case mage.XPackProject: + return _prepareModulePackaging([]struct{ Src, Dst string }{ + {mage.OSSBeatDir("module"), dirModuleGenerated}, + {"module", dirModuleGenerated}, + {mage.OSSBeatDir("modules.d"), dirModulesDGenerated}, + {"modules.d", dirModulesDGenerated}, + }...) + default: + panic(mage.ErrUnknownProjectType) + } } -// prepareModulePackaging generates build/package/modules and +// _prepareModulePackaging generates build/package/modules and // build/package/modules.d directories for use in packaging. -func prepareModulePackaging(files ...struct{ Src, Dst string }) error { +func _prepareModulePackaging(files ...struct{ Src, Dst string }) error { // This depends on the modules.d directory being up-to-date. - mg.Deps(mage.GenerateDirModulesD) + mg.Deps(Update.ModulesD) // Clean any existing generated directories. if err := mage.Clean([]string{dirModuleGenerated, dirModulesDGenerated}); err != nil { diff --git a/filebeat/scripts/mage/test.go b/filebeat/scripts/mage/test.go new file mode 100644 index 00000000000..ee63384df86 --- /dev/null +++ b/filebeat/scripts/mage/test.go @@ -0,0 +1,58 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "context" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" +) + +// IntegTest executes integration tests (it uses Docker to run the tests). 
+func IntegTest() { + mage.AddIntegTestUsage() + defer mage.StopIntegTestEnv() + mg.SerialDeps(GoIntegTest, PythonIntegTest) +} + +// GoIntegTest executes the Go integration tests. +// Use TEST_COVERAGE=true to enable code coverage profiling. +// Use RACE_DETECTOR=true to enable the race detector. +func GoIntegTest(ctx context.Context) error { + return mage.RunIntegTest("goIntegTest", func() error { + return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) + }) +} + +// PythonIntegTest executes the python system tests in the integration +// environment (Docker). +func PythonIntegTest(ctx context.Context) error { + if !mage.IsInIntegTestEnv() { + mg.SerialDeps(Update.Fields, Update.Dashboards) + } + return mage.RunIntegTest("pythonIntegTest", func() error { + mg.Deps(mage.BuildSystemTestBinary) + args := mage.DefaultPythonTestIntegrationArgs() + // NOTE: This is the reason why the common integtest package is + // not being used. + args.Env["MODULES_PATH"] = mage.CWD("module") + return mage.PythonNoseTest(args) + }, "GENERATE", "TESTING_FILEBEAT_MODULES") +} diff --git a/filebeat/scripts/mage/update.go b/filebeat/scripts/mage/update.go new file mode 100644 index 00000000000..c9a54aa67d0 --- /dev/null +++ b/filebeat/scripts/mage/update.go @@ -0,0 +1,144 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "log" + "os" + "os/exec" + "strings" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + "github.com/elastic/beats/dev-tools/mage/target/test" + "github.com/elastic/beats/dev-tools/mage/target/unittest" + + "github.com/elastic/beats/dev-tools/mage" +) + +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs, Update.ModuleDocs) + + unittest.RegisterGoTestDeps(Update.Fields) + unittest.RegisterPythonTestDeps(Update.Fields) + + test.RegisterDeps(IntegTest) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields, Update.Dashboards, Update.Config, + Update.Includes, Update.ModulesD, Update.ModuleDocs, Update.FieldDocs) +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. 
+func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + switch SelectLogic { + case mage.OSSProject: + return mage.KibanaDashboards("module") + case mage.XPackProject: + return mage.KibanaDashboards(mage.OSSBeatDir("module"), "module") + default: + panic(mage.ErrUnknownProjectType) + } +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// Includes updates include/list.go. +func (Update) Includes() error { + return mage.GenerateIncludeListGo([]string{"input/*"}, []string{"module"}) +} + +// ModulesD updates the modules.d directory. +func (Update) ModulesD() error { + return mage.GenerateDirModulesD() +} + +// ModuleDocs collects documentation from modules (both OSS and X-Pack). +func (Update) ModuleDocs() error { + ve, err := mage.PythonVirtualenv() + if err != nil { + return err + } + + python, err := mage.LookVirtualenvPath(ve, "python") + if err != nil { + return err + } + + if err = os.RemoveAll(mage.OSSBeatDir("docs/modules")); err != nil { + return err + } + if err = os.MkdirAll(mage.OSSBeatDir("docs/modules"), 0755); err != nil { + return err + } + + // TODO: Port this script to Go. + + // Warning: This script does NOT work outside of the OSS filebeat directory + // because it was not written in a portable manner. + return runIn(mage.OSSBeatDir(), python, + mage.OSSBeatDir("scripts/docs_collector.py"), + "--beat", mage.BeatName) +} + +// FieldDocs generates docs/fields.asciidoc containing all fields (including +// x-pack). +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} + +func runIn(dir, cmd string, args ...string) error { + c := exec.Command(cmd, args...) + c.Dir = dir + c.Env = os.Environ() + c.Stderr = os.Stderr + if mg.Verbose() { + c.Stdout = os.Stdout + } + c.Stdin = os.Stdin + log.Printf("exec: (pwd=%v) %v %v", dir, cmd, strings.Join(args, " ")) + return c.Run() +} diff --git a/filebeat/tests/system/test_ml.py b/filebeat/tests/system/test_ml.py index 7c798b155d7..d6f69356662 100644 --- a/filebeat/tests/system/test_ml.py +++ b/filebeat/tests/system/test_ml.py @@ -23,7 +23,7 @@ def init(self): "/../../../../module") self.kibana_path = os.path.abspath(self.working_dir + - "/../../../../_meta/kibana.generated") + "/../../../../build/kibana") self.filebeat = os.path.abspath(self.working_dir + "/../../../../filebeat.test") diff --git a/filebeat/tests/system/test_pipeline.py b/filebeat/tests/system/test_pipeline.py index 8058306a95d..1cdd8ddca07 100644 --- a/filebeat/tests/system/test_pipeline.py +++ b/filebeat/tests/system/test_pipeline.py @@ -21,7 +21,7 @@ def init(self): "/../../../../module") self.kibana_path = os.path.abspath(self.working_dir + - "/../../../../_meta/kibana.generated") + "/../../../../build/kibana") self.filebeat = os.path.abspath(self.working_dir + "/../../../../filebeat.test") diff --git a/x-pack/filebeat/Makefile b/x-pack/filebeat/Makefile index 56633e2b3e5..7427a5c672b 100644 --- a/x-pack/filebeat/Makefile +++ b/x-pack/filebeat/Makefile @@ -1,3 +1,4 @@ -ES_BEATS ?= ../.. 
- -include $(ES_BEATS)/dev-tools/make/xpack.mk +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/filebeat/docker-compose.yml b/x-pack/filebeat/docker-compose.yml index 8a4f6f4a240..0cbd089a520 100644 --- a/x-pack/filebeat/docker-compose.yml +++ b/x-pack/filebeat/docker-compose.yml @@ -4,10 +4,8 @@ services: build: ../../filebeat depends_on: - proxy_dep - environment: - - BEAT_STRICT_PERMS=false - - ES_HOST=elasticsearch - - ES_PORT=9200 + env_file: + - ${PWD}/../../testing/environments/test.env working_dir: /go/src/github.com/elastic/beats/x-pack/filebeat volumes: - ${PWD}/../..:/go/src/github.com/elastic/beats/ diff --git a/x-pack/filebeat/magefile.go b/x-pack/filebeat/magefile.go index f3b6f75ca46..9da8d5c25dd 100644 --- a/x-pack/filebeat/magefile.go +++ b/x-pack/filebeat/magefile.go @@ -7,185 +7,32 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import filebeat "github.com/elastic/beats/filebeat/scripts/mage" ) func init() { - mage.BeatDescription = "Filebeat sends log files to Logstash or directly to Elasticsearch." - mage.BeatLicense = "Elastic License" -} - -// Aliases provides compatibility with CI while we transition all Beats -// to having common testing targets. -var Aliases = map[string]interface{}{ - "goTestUnit": GoUnitTest, // dev-tools/jenkins_ci.ps1 uses this. -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild builds the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatXPackPackaging() - mage.PackageKibanaDashboardsFromBuildDir() - filebeat.CustomizePackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Fields generates the fields.yml file and a fields.go for each module and -// input. 
-func Fields() { - mg.Deps(fieldsYML, moduleFieldsGo, inputFieldsGo) -} - -func inputFieldsGo() error { - return mage.GenerateModuleFieldsGo("input") -} - -func moduleFieldsGo() error { - return mage.GenerateModuleFieldsGo("module") -} - -// fieldsYML generates a fields.yml based on filebeat + x-pack/filebeat/modules. -func fieldsYML() error { - return mage.GenerateFieldsYAML(mage.OSSBeatDir("module"), "module", "input") -} - -// Dashboards collects all the dashboards and generates index patterns. -func Dashboards() error { - return mage.KibanaDashboards(mage.OSSBeatDir("module"), "module") -} - -// ExportDashboard exports a dashboard and writes it into the correct directory. -// -// Required environment variables: -// - MODULE: Name of the module -// - ID: Dashboard id -func ExportDashboard() error { - return mage.ExportDashboard() -} - -// Config generates both the short and reference configs. -func Config() { - mg.Deps(configYML, mage.GenerateDirModulesD) -} - -func configYML() error { - return mage.Config(mage.AllConfigTypes, filebeat.XPackConfigFileParams(), ".") -} - -// Update is an alias for executing fields, dashboards, config. -func Update() { - mg.SerialDeps(Fields, Dashboards, Config, includeList, - filebeat.PrepareModulePackagingXPack) -} - -func includeList() error { - return mage.GenerateIncludeListGo([]string{"input/*"}, []string{"module"}) -} + filebeat.SelectLogic = mage.XPackProject -// Fmt formats source code and adds file headers. -func Fmt() { - mg.Deps(mage.Format) -} - -// Check runs fmt and update then returns an error if any modifications are found. -func Check() { - mg.SerialDeps(mage.Format, Update, mage.Check) -} - -// IntegTest executes integration tests (it uses Docker to run the tests). -func IntegTest() { - mage.AddIntegTestUsage() - defer mage.StopIntegTestEnv() - mg.SerialDeps(GoIntegTest, PythonIntegTest) -} - -// UnitTest executes the unit tests. -func UnitTest() { - mg.SerialDeps(GoUnitTest, PythonUnitTest) -} - -// GoUnitTest executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoUnitTest(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoIntegTest executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoIntegTest(ctx context.Context) error { - return mage.RunIntegTest("goIntegTest", func() error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) - }) -} - -// PythonUnitTest executes the python system tests. -func PythonUnitTest() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) + mage.BeatLicense = "Elastic License" } -// PythonIntegTest executes the python system tests in the integration environment (Docker). -func PythonIntegTest(ctx context.Context) error { - if !mage.IsInIntegTestEnv() { - mg.Deps(Fields) - } - return mage.RunIntegTest("pythonIntegTest", func() error { - mg.Deps(mage.BuildSystemTestBinary) - args := mage.DefaultPythonTestIntegrationArgs() - args.Env["MODULES_PATH"] = mage.CWD("module") - return mage.PythonNoseTest(args) - }) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. 
+func Update() { mg.Deps(filebeat.Update.All) } From 4026e43eba7d041a2b02717b96271ee860022bc5 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:54:49 -0500 Subject: [PATCH 05/18] Refactor heartbeat build logic --- heartbeat/Makefile | 28 +- heartbeat/cmd/root.go | 2 +- heartbeat/docker-compose.yml | 2 +- heartbeat/include/fields.go | 4 +- .../defaults/default.go => include/list.go} | 12 +- heartbeat/magefile.go | 133 +- heartbeat/monitors/active/dialchain/fields.go | 35 + heartbeat/monitors/active/http/fields.go | 35 + heartbeat/monitors/active/icmp/fields.go | 35 + heartbeat/monitors/active/tcp/fields.go | 35 + heartbeat/scripts/generate_imports_helper.py | 39 - heartbeat/scripts/mage/config.go | 44 + heartbeat/scripts/mage/fields.go | 87 + heartbeat/scripts/mage/package.go | 86 + heartbeat/scripts/mage/update.go | 77 + x-pack/heartbeat/Makefile | 4 + x-pack/heartbeat/heartbeat.docker.yml | 31 + x-pack/heartbeat/heartbeat.reference.yml | 1397 +++++++++++++++++ x-pack/heartbeat/heartbeat.yml | 164 ++ x-pack/heartbeat/magefile.go | 32 + x-pack/heartbeat/make.bat | 11 + 21 files changed, 2108 insertions(+), 185 deletions(-) rename heartbeat/{monitors/defaults/default.go => include/list.go} (76%) create mode 100644 heartbeat/monitors/active/dialchain/fields.go create mode 100644 heartbeat/monitors/active/http/fields.go create mode 100644 heartbeat/monitors/active/icmp/fields.go create mode 100644 heartbeat/monitors/active/tcp/fields.go delete mode 100644 heartbeat/scripts/generate_imports_helper.py create mode 100644 heartbeat/scripts/mage/config.go create mode 100644 heartbeat/scripts/mage/fields.go create mode 100644 heartbeat/scripts/mage/package.go create mode 100644 heartbeat/scripts/mage/update.go create mode 100644 x-pack/heartbeat/Makefile create mode 100644 x-pack/heartbeat/heartbeat.docker.yml create mode 100644 x-pack/heartbeat/heartbeat.reference.yml create mode 100644 x-pack/heartbeat/heartbeat.yml create mode 100644 x-pack/heartbeat/magefile.go create mode 100644 x-pack/heartbeat/make.bat diff --git a/heartbeat/Makefile b/heartbeat/Makefile index dde71c06312..0326f3e977d 100644 --- a/heartbeat/Makefile +++ b/heartbeat/Makefile @@ -1,24 +1,4 @@ -BEAT_NAME=heartbeat -BEAT_TITLE=Heartbeat -SYSTEM_TESTS=true -TEST_ENVIRONMENT?=true - -# Path to the libbeat Makefile --include ../libbeat/scripts/Makefile - -# Collects all dependencies and then calls update -.PHONY: collect -collect: imports kibana - -# Generate imports for all monitors -.PHONY: imports -imports: python-env - @mkdir -p include - @${PYTHON_ENV}/bin/python ${ES_BEATS}/script/generate_imports.py --out monitors/defaults/default.go ${BEAT_PATH} - -# Collects all module dashboards -.PHONY: kibana -kibana: - @rm -rf _meta/kibana.generated - @mkdir -p _meta/kibana.generated - @-cp -r monitors/active/*/_meta/kibana/* _meta/kibana.generated +# +# Includes +# +include ../dev-tools/make/oss.mk diff --git a/heartbeat/cmd/root.go b/heartbeat/cmd/root.go index 0ffda01644e..69c2b409715 100644 --- a/heartbeat/cmd/root.go +++ b/heartbeat/cmd/root.go @@ -19,7 +19,7 @@ package cmd import ( // register default heartbeat monitors - _ "github.com/elastic/beats/heartbeat/monitors/defaults" + _ "github.com/elastic/beats/heartbeat/include" "github.com/elastic/beats/heartbeat/beater" cmd "github.com/elastic/beats/libbeat/cmd" diff --git a/heartbeat/docker-compose.yml b/heartbeat/docker-compose.yml index d2c9e76f26f..1bf92fbb0e6 100644 --- a/heartbeat/docker-compose.yml +++ b/heartbeat/docker-compose.yml @@ -5,7 +5,7 @@ 
services: depends_on: - proxy_dep env_file: - - ${PWD}/build/test.env + - ${PWD}/../testing/environments/test.env environment: - REDIS_HOST=redis - REDIS_PORT=6379 diff --git a/heartbeat/include/fields.go b/heartbeat/include/fields.go index 38cc082ded6..b7cede61275 100644 --- a/heartbeat/include/fields.go +++ b/heartbeat/include/fields.go @@ -24,12 +24,12 @@ import ( ) func init() { - if err := asset.SetFields("heartbeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("heartbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } // Asset returns asset data func Asset() string { - return "eJzsvXtzG7eSKP5/PgVKqfol2UtRD8uP6Nb57erYTqKK7Wgte7Nn92yJ4AxIIpoBJgBGNLO73/0WugEMMA+KlETHp0pKVSyRM0Cj0Wj0u/fJNVudEpbprwgx3BTslLx+efkVITnTmeKV4VKckv//K0KI/YLMOCtyPf6KuN9Ov4Kv9omgJTsle/9ieMm0oWW1B18QYlYVOyU5Ncx9ULAbVpySTCr/iWK/11yx/JQYVfsP2SdaVhaevePDo2f7h0/3j598OHxxevj09MnJ+MXTJ//hZ+gB1f68ooYdWHDIcsEEMQtG2A0ThkjF51xQw/LxV+HpH6QihZzjI5qYBdeEa3grHxpoSTWZM8GUHWtEqMjDcEIafJrjY4rReLb3bsWIRTKTitCicJOPU5waOteDqEPsXrPVUqq8g7n//PtepWReZxY3f98bkb/vMXFz/Pe9/7oFd2+4NkTO/MCa1JrlxEgLDGE0WyCoLUgLOmXFbbDK6W8sM21Q/5uJm1PSADsitKoKnlGEbCbl/pSq/10P9c9sdXBDi5qRinKlI3y/pIJMWVgFzXNSMkMJFzOpSpjEfu7wTy4Xsi5y2MRMCkO5IIJpw5r9xVXoMTkrCgJzakIVI9pIu61Ue9RFQLz2i53kMrtmamIphkyuX+iJQ10LnyXTms6Hzw0i1LBPHXTu/cSKQpJfpSryW7a6Q/jMz+uI02EAv7JPuq+jlZ0LIs2CKYtgklHNesdJ9yCTIqOGiYYxEJLz2Ywpe7QcSpcLni0AscYepplirFgRzajKFnRasDE5n5GyLgyvimYYN68m7BPXZmTfXfnpM1lOuWA54cJIIgVrLcfjns6Z8Gh1jPEs+miuZF2dkuP1uP2wYDiQ45aBmhxboYROZW3gTy1nZmlXyoThZjUifEaoWFnoqSXDorAENyI5M/iLVERONVM3dqG4eVIQShbSrlkqYug106RkVNeKlekDY0+NmnCRFXXOyF8ZBYKew5MlXRFaaElULexrbiqlx3APwKrG/+TXpReWfU0ZqWRVF5YdkiU3Cwss5YW2rMQEXKhaCC7mdlT7oQUnWoyyfBM33LHZBa0qZrfMrgnIKqwIeKtdpxg7pM+kNEIaFm+DX+qpJVQ7giVRCxMsGbhvIed61MA4tkRg+f+MF2zKqBnDOTm7eDuyHB0vhjB+uiy3vbSqDuyCeMbGESHEHCeXTCOTWVAxZ4TPmpNgiYNrou07ZqFkPV+Q32tW2xn0ShtWalLwa0Z+prNrOiLvWc6RKColM6Z19GAYVdf2NGnyRs61oXpBcE3kEhA/TtgKULhHqrvr7e9hMH9SLFFwKcLnfZyKDFxVa86O/fk3HDohn3EKRcT0no0Px4f7Kjvuh9P+fxdAvrOkshZCywhQnKAAhTvSyJDm/IbB5UOFex2fdl8vWFHN6iKmDSRz5RdOzFKSHxydEi60oSJz11HrqGk7uT1vyVjT2liuUJdUgJxiGSvRrKIKyZRrIhjL7QEUjiN3pksG9MSbydJOPlOy7MHJ+YwISfxBAzTgCfQfyZlhghRsZggrK7Ma9236TMr+7bY7uYvt/rCqNthuf9ztBEQbutKEFkv7T9gHe/lrFDQCGUxXEZ+0N+U4RZkIrCvsQPP8EsZy00xZ8wjwcT6zhJIMN0w0CcGUNFtwwfrR74bo3wOe72IHPgr+e80Iz+1NOeNM4XbY4wV4+JbP4GKH219/17M/QRKzTB0vAXh/6XcDWD7Pe5f8gp7Mnh4e5v1LZtWClUzR4qpv8eyTYSJn+f0Q8NrPcR8cIEuyQq4qaVGs3CWkCc2U1FZj0YYqK2hY/jBBUuf5JNxa65Az+6qZ0GMmK3hHpHoZf7aZTHXmBrIcImczkOUoHisuuOHUSEAGJYKZpVTXVugSDLQKZJsoKyk2pyqHW9LellLoUfQkXqVTnnOFH9CCzAq5JIplViFCeeDDyws3HHKuBrIOOPYD+3gEDNwCmokcH7/82ztS0eyamW/1dzg+CtWVkkZmsuhMgrqn3bvWdApUamaVES+OeGQYRYWmAMCYXMqSBWnCyu72ScNUSfa8kizVnr2cFJsxlUwvWsvRKOW4r51ciHs4ZUEQjORdmJZYUMTc72AzeAwz6pqOWPzQllPVuoblN1InFxak32qBKAYh1ImVznRBesZpEGmlsWY0Sy64JftwgIOCnpwmN96Bn0ixSjEruMH1iTe51Tg1K6kwPAMtgH0y7tJnn/DkjdzdynW49I0kN9yukf/BGp3BrpEp0CM0NzV12D+fkZWsVRh9RotCIypB2jBsLtVqZB/y9442vCgIE1acduQoa5Xh3ZQzbSwFWDxaJM14UdizVlVKVopTw4rVHUVGmueKab0r/ghkjbqDIyg3obvgAtsop3xey1oXKyReZ9bhRZGMp2XJwK5FCq6N3bPzixGhJJel3QSpCCW14J+Itnq9GRPytwbHeB+n4xnpFBxFlx42T/STsftggjjsFy/AsNRID3mNxhJUrSdjXk0sWJMxgjixamPFRO5kQSC0ZEh7V4BiMx64yasNb/LkwTV7dH4RFu64I25Vz3Kd8caCKFXQ9sn5xc2J/eD84uZZs8ED8FdSmQ1XUEgx32wNF1KZtdAHQw7NdiEIvT17uRESPRhIDLuAxLFAnKA1+9fkLTOKZ7oDz3RlWA8T2GRXgsBx9OJkMxD/aidDfdoqJPF1YyTeSJEW3CUguAbuDe3xhpSFs20EbgfUOYvFfCdp/Zh82BK1boHmRyaDAYtaFUSpVWy+okRXLOMznpFCosmWKFZ4dmTvuJtGzMMfqSycqTmEKX5jb127XmCyMQeM0RtfNIS0fBEpMjxAyeT9WxdGZ/KqkrwF8Br8EPJGijk3dY43Z0EN/JEqb4EIvvlv
sldIsXdK9p8/GT87Onnx5HBE9gpq9k7JydPx08On3x+9IP/7Td967O3OBRPmqmXPuG1V3fN8y5piu0aYdWBJ76QyC3JWMsUz2g92LYxa7RzolzgPzDoA60sqaN4LpGJzLsXOYXwP06wD8V9rNmVZLx65+QxI5GYtBt9KYRSjxbqN5lpeZTL/LJt9fvkLsXMNbfjZms3+HHC6Db8VzP1/fdkH6dB29wjLdwbxo2Zq38vF0ZOoSXsmOiLO4ITakJyRuaKiLqiyFOPcLIrhtTD+qrtdKK0GIx9yF67wMsmYMEw5LXdWSKmIqMspU+ALAeOG1yd1a2gEsSDVYqW5/cU7UTJPyroDzjsJ5jn7eLFCtxQXhNZGlnBzzZn06x7YsanURor9PPuqZeiQdd62czQfbWbm+AHv2+gaRQlA1uAH4WKmqDaqzkwdO0saxNh96Bhgb/WPzJywhmZBHRuQqSCvXx6ju8becjNmsgXTuHdwZ/NoevRCNTDbiz51JSb+L66DmTEFIgyoauH8V4qV0gSzJJG10Txn0Vz90FHi3DHxkLHHBl521Jd6PnHYZijwQrnpY0eQmyBF3GY6ckxAlZI3PGdqI/04UCPLju8nxCcXPqzYAxK8hbGrm2XHIzLP2IhIlTIaPueGFjJjtK0LBAPADeUFnfLCXmd/SNFjqV+31FrvM6rN/lF2vxWfRWCQP0AH9h4OIEmg9WYzBxaDN8lGKxiCsbuyzRbgbpa7QO1t/uN72qkD6Hz/6PjJydNnz198f0inWc5mh5st4txBQs5fefKDJSR+h2H4+/16D2NJCqBF19UmwPlv+51Qd8GuOR6XLOd1uRngbz13irxVG8BNM5DfHowmnj179vz58xcvXnz//febAf6h4eIIC4QGqDkV/A/njsxDDIlzf6yawJH0orZCAIcQB0LRcLRvmKDCECZuuJKi7Lc4NRfi2a+XARCej8iPUs4Lhvc5+eX9j+Q8x0gMDH8Bz1QyVOOhieaJlTnKReD0XlpofbyZxBDeSi3kzozdCXeKLPFeeW+DQ9Am7NwZzjQsZ/EwYDfVzE+5YEVlxWYUW/DGnFIdEU2YQ3s9f2UZleGNtrGlMdm9vSsW8B6HJyUVdG5vdOCxYRm9XjCM7xrgW7v0iQawCG8bjsP8JZ3vlmnGcgTMFkwICNqSajKteWGCcDQApKHzXcHYHBYHIR26J3eJqQaKRtvuAJBEVW4CQhJhSUKw4tVd7j9Ajg9OJG3+FbmIUg72qvPFZjwsem8DF2LsoQI9FY20By42dc2gWzgPkes1cc/kS3Z3JT67R5/XF+/zivbrH9XxNbyEz+/9uh2W3bnAYi7zj+YHi9mG9y4B3/uCnWFrYO7A++gRe/SIdVf16BF79IhtisRHj9ijR+zRI3ZXjxgLQk+SY0o21gvfMkP345sxXK9G2sH+pNSV3sTVWyjr9ctLPy/uoAtUlLA6TYwckwnL9Ng9NMG8EZVmjNpLtay1wQBv2KZiIDzV/vxqtaffa6ZWEGyLEd5BoeAi5xnTZH/fuRFKuvIAWQTrgs8Xplilhyfk6kUrgjFgVQhmYeU2LgybKxcMS/PfLNgosaUaYrZgJQ24cffs4JLAUFwrzBZ073BNjiAJaMoMPSa9trnogWbQQKhKyZYx9nX00cZZf41FNIOkGhcQjOODukLFilxzkY8to7ErLTE4HR8wi8jziflvdmsKhn5Nu4k+5Q8ivDHnsp04x41mxaxxY1qx046fYHNzt+TnyuaYuTy/LqxDqbG3ARSlyN4CDex2kxLaO3frcnwwTODcdnTP1dHc3MVEINebTkbF65u7JKkivfT5DXw0eb/roJBzgs4FxbOE6sbkDL5NszS84uNp0i4wyhEFo9MCV02bxM8xedMkKAPX8zmrkK/AS2ZvYe8BtZ/aIZq3Q6qrnMWpzn4Q6lMmCWS8+HAHF8LQ5JGg1kumDJNGvDJKvY3QKnaxWjpCK1lPGsqUmSVjdg4fny5yF5/AlJvApXNg2mtWSG1XcuZRfTtavdVIKmaFBtBDChgLMwHgzyQ52ALRj9D+jNsErzEJNKgtWSnVilj2BzkGbqC8lal8UxeCKXTE8yZn2T2mMyrsQiFv+W4X/U5Z1/kru/XBTh347x2yx+yN0IX0YczE9pzb8ZObdSgxbM5vwG/aPvRLey69UzmpnuBHTMbyV88IjOl2AHd6IvHNa9N4ncWwNY7YZFDLnybwxGREJtpQw+wvtKCqnIzJr1TZAwDJ3rMawqOCdCJnVloZkWUqelQFBSOSi3exwrMrgEGzjFUGMmJd6AveTl7CGZGqYFQDw0yGBOdBRuu2sBwIAeAeuGBcrs5OLhnkE26Goe0PIsOCzxcu96n/BhjYufOUDrhGRgSJVnbbF1S4PRxjMtpk5J0BmgntspEaZYSmZOXAb+AMsiz1yWgbkEG6YewByCAZsdashwz6aKG2uiY4mIHH9lMFrmwXNAHpyngzZbQywHldJvJaJhF0T5d/2NAHFykxBAJoDv6CphZIRw1+ayfR9QIHHnj9Ps1ze9bdhb0PFzbLJ+lWTma8YPuZYvb6nKCbC+vCcN3ku/r7062U27lKULh7zyvsUUW1tnjdx5S9/o2Stcnk7pzGdjVuittY+Xn0dbRbVLjtHkUkrNPozGaG1Jhij6VPH23uf3zY7ZSuswx8eVDeZkZ5USuWMuZkzGEmvc2JTIccZNIbnki3hv4N3lVpgfcMJEAUvB1W6h5FxP5c4IrojYR4qBCY0hSUsgQLZqQhFUrmdbHzihg4i7NVbVQXAhPTY2aSvBGNqoONCnP4pQqVTXqPcLnSvxf9yLCgabapp/TO2HDTDJkzpLBEjRbGiXt2Qr617EwzQw6clK2Z+c5iJV291QNSg0o9tW9Z4RzRBZw4OeUxmkP2sbOqtOw9rrIVFw0QWCUHTFHhI7ffloAR6nHbbJ5IQAMnTLMbprjZVAIa8jDuPd/bbI8u3XytK82D0RJufl04o29/2GF4y4kKJQMXobAcLgpVDFpgKJpl9+cbTeqKGNniusn9ZDliSa8ZAZ3KTccd+82k0Fwb0CrRztdrQguXFeb5F3em/K/JR0tEphaQEe5smi5cnGN9I72QS4FxgZkpVmTFjCXX/yG5xEp5Ul0nQ1r5wfJ2TZYsCUz5mpxr8v99fXR88n99XGKabm+36n+g6p5U1xYQOFFgyWhsZMmAGEzKs2vdS6V7l6wiR9+Twxenx89Ojw4xjPbl6x9ODxGOS5bVdrvxr2Tf7M5ZKQRFO4VPHI3di0eHh73vLKUq/QU0q62ooo2sKpb71/BfrbK/HB2O7X9HrRFybf5yPD4aH4+PdWX+cnT85HjDg0DIe7oEe1mo3iZn4DtQgfw/uujbnJVSaKOoQUMQ2nm56dMqHFvH28lRBRc5+8TQlp3L7CrKLci5ttufI8eiwj4+Za0RsQwcy7FCCQ8VlZRlRiz4zSdXaJ+ZxNsLc5+SGS0Sob0BI/6uc2gWVC/uJd411NXEzPf9dvbXl6823rmfqF6QbyumFrTSUNEManzNuJgzVSkuzHd2MxVdun0
w0qILZKgWwyEbb264QGvVjip4mFijV27ghAdbBiGokJplUuR97oHzmSNXUBGAxvBvJnIgsWtheRJwK9QNmsiytmfCs+yMBZ4NkAikXZyhiWDuyou8ZBsnudxJIwhHq1lEVIkvqVr6jSahRmtTgc4Z7NJbx4Gdav6FYjRfkW/ZeD62OhStC0MuV9oSSRhYf4d3WTKerFwhHQiWX3LdJ9eeNXJ9mB9nB85wSqg95lKA+fL8lYNj73WtZMUOzkptmMppufddqhLS6VSxG7Sn+lcuP+x9ByZaQX766bQsm6uZ08I/tX/49PTwcK9dQSmYalDJ3JDq87jY5dotdcowjt7Jm+utROseHpKom023kjjXhovMWbD/JfrOlYuJPvKTdyQSp4TD7ekeHvtyogCqxtp0DVV4Dt0vN7kaQC1gkP0UXKCk2Vo4x9K6cT28ZMzpKiqDphjSOriaMlqMyaRZ5wQ9C3GFzvBdujWfjKKZ8ddLDOGotW8B2LAE7ksBp/vjKq1lGD1bVVaOkuBwsDcwGmWsAoQevp7N6fCs5pEeeGOPhp2g4Y5tyLtEeQut+RJ1gL908y3+A+5H8SoartXUvOvqBJbNbsFCtz1syMZvPWrO5GQZRy+SaGb4jZX+LZ5mXGnjK5sOLYxtZfPfdln2lrp1UTBVvKSwjGREu6SC3r4ixfX1lW6xwHWMcVZIuqGH9j3X1wTGxmKnXHY0NMe7tRPMiZYFmHt8HTz/81EzLJmFtci+0UEbciKBPW23LvFKSFVusYFbrPUd2Cr5HyyH+W5Z9ii4ywqQ2g8tDzk6PFxTj7SkXGCoD9YYheJgVh8tMVqfCvAjulptaPzTms9bt0EDnIYy6DDMkmKtGs0Yoc7sCktB3DrllBaFr0DX4+Ce8cDPW85s5+7+oXlgCI9nMErbY0qcaST1YYHTWZOpFfE8K3SOXPs5BNt4tyTYNwDyMYDha4L7S45qLTPe1EIGvdFXC0xK2yHSDpzNxPtQgYhHxCykZq4yOlqrYbJzL4+Tt1JwI+F6+M8fzt/+l6+iDvYwl5EOBQUhfARNvd6e2s2pobMZw8vCPt5eg4mK6Dujz1Ye2SaA3DQK1NCB6ZeEk22+oBYo6XL2i/SwNgX01ZyZq4ea8wMMB0sAsUOvyoKLa907N0yQxJjdY+aYOcBuhtE7RxwOeMjGKeSSMKpXFkeGAalMV47Y/BCR9SNop5VT0toIje3f91gPrAGcyWDiHJGcKzhrDqXf9aI0Z0kRh3vM/wpGGkhyXUtSXMQxQPcA4dwO1JiwfMAPciwRfnd8pg+UOopteCDasvIoeA+sfvXx/NV3yEncbRpFan17CV82yCJyKVol1IKhcRknFt+XamC0b8AErjq5kyHt42FQc6F4SdUKeRvg5MfWsvtnT1IyHmz+uBLB4Nzl3ckzHP7DZyeH/QC9tTQb7zoXRGaGFi1bbC9omv+xKWiJkahLA3YkOzWkT1kW4myL0oo0NM+9GjOxo00IT2UWcBJP+llMmSSUrwcykccTIN9YSRmCqQBJLlIChOhS5vYE5b2zZ7uYvWSGYkw5eK7zHmErJlifIxV9tHk0IRJqFE1YMicLNpGw8Ix2IqWyLLBgN1R0IoOTSKoHiPp6GIvbcNAqrt2XTwe2fVAV1Fgp80/IMI+djwBaz75HDQHctv/UfLJpUW5fdCaRsV1dZZLJsqoNRjW6qi0QNQ4RfVETkR7bZdxFpJFSsWeIiEIU01YhWJND3B7CaFcKeG1iFhdU5Uuq2IjccGVqWviaKXpEXkFhh6iIBao7P9dTpgQzYEzN2V3zxO2q+onh/l7on9zYcTGYPvONiQrCe6vB0vs7Jx7Cid3S0i5dMVMrrMy1YY2ZXa3w3Uarg3RNZ+ODdUVritbyEVLbUS916Td10fKI/17TAri4T4q3o/igXwuMC3ZqYoystILhSNqe7VbZLJbxPPQ8QiXZSPvOUH76LoNa8Tz3WfjOdCBU78lzPSew/M0IDAjOmRf4u70CuJjP6rTMABdogdmoHs9pkvRRe+/kBLo1wBaOu0h66CR+4Bi88qnnnzfn/Sd3vG6Zfde9TwaO1w9SucpIvnCc66vhLCJJ2Tw7FDQwmoTSVpPUPHc+IzflyNfbiTLlAvsdxXb/qA5TZNRJRmyIcAPCC3GXKltww6DQ4p2R2jh8P714dvXsZEOn7i8VU9Q0rZwSYHoS3WUs47rLvBnjEsaIntgu6d0evl8u263M+sOCZQvweGcVq8G7f5qMbmR15XDa9spb9FVglUpf2Q89w1ofd1oc7QPrvYqbupG75M57SS4ZfAfJp5199xOTb6GHV8aEkXpE6mktTD0iSy5yuWzbt5t6VFQtudhhJm1D3m9pZonk3/fusVhU6HugndGSty7h+8KbsymnYhtoLx0YbiuguWe+oGZEcKwRtCmc6jzelp7FdJNP77+ao8Px0fH42b7Kju+zAT6fEoR4RZdEGwWVJHuWcW0l3+JBV3EyPhkf7h8dHe+7fIH7rAXh22BJj8VCenb3sVjIY7GQFNbHYiGPxUIei4W0QHwsFvJwxUIWxrSs0D99+HDhPrlr8Xw7RIhpuWuhWeypNy6ZWcidmZZ/MqbyUxGcaiBdBB0eaCiC2LUpi8MsjCSFXDIF4VhWT/b1P8bkkqUnYu9NePAlrbixI8DO7Xkn5N65Tz+wotXrl5d7hGjMZu+Nmp8zMyIV5HdX9UBCo8fnVOarsfOO7AqrH5wFD6groBdm7gMf26cvpSoGErU97NAXUW1Yqv9OKWE4fpPRBpTsp++D3a5Qnx4cTAs5H7tPx5ksD4ZWoispNBtrQ02t29z8ttVsHsjtCBtnIzhbh6GHVZwcntwC759BNg74u9PNYMWhB2Qebo6B6jdHKWDDVSnD8eyvTvkAFPFBGlq03LhOYvYn9FuLatAKFozmTKUmjmZZJ0+eb8BkdreUy3WLGCSXFy8GofZE/ucg39H5A2A/PqyfHf23HdcE/43KO0/Fjzfhg/XiBjpuaJLlLqOCM3cUOwBLXazd35r/Rs4bSdRHqQ+lkmOR6SQj/9ez9+8mIzJ5/f69/ef83Q+/THrR/Pr9+/6l3Tv5cDhLDwRacGK9XdmFxSakrZK/BtHYuigwoBZs3z6I2OLTZ9HRdhg2XCvRE8lwUzbDagkFN+g3N6SGhIhQ6KKiqrcu2jn6NxUNVdbIxE3hqms7Qo09odCG2KcJVGmcPYnJw40UFw5o1Q1wix91Fthy7qArdkFvWMgp0pbGMDQm8+XiqqrgLEdPEROZxHLeigi2TJU6LpiG1lA3KPtmBaMCcmlT0IeiobdNTSRaupzDbzq5iVbSBrev84agjL5RemLCilyUcMqO3iUfbh6V40OOu33TM1mWtXA4x8BWecOUZ2gu2kKlQcsu1sK1/XZf3SmYww8bMifaUcfeAnpHBrrz+Jo5v2H27nFeLyjgJ716pBs13SOpj4H9CJLCr3zG+xexK5fuOep3v1yeQ1hfgQd7GdsaHMGRN3TF1Jjw6uZkZP//zP5fs2xEKl6OCDPZF6mnrlNT7Vr68c2poFdoP9
kV7RByfvbujFy49v7kHcxGvvUK3HK5HFswxlLNDzDtAgq1HVTujX2Er/vB+NPClEXLG0jIpaEipyoHtPtCKv5dOMhcE1rwucC8ezx975j5oZBLywtb42n43FtZIOsPWUbtEsD61te7D88GiF5RobfoYLBd2wwoXqHDqYx23GWUC20YbaqrMPIzjh9b35IhA7yksGeFfFvn1YiYrMLzss+zsoKDMv7uizwqa8+Kyar+XYI7uuMmetCjcoYoR0aLPrFoVke5Pu9GTblRVPFi5ZKVsKJOulMLLuYaxYqSZ0r6RBncelpo2eRhxg/r61XFRoRnv6cJxjOasamU1yNiltwYjPOKOam3kGpuaifcNPVab5jIWxA2yTsha5ZlMreCh3M7h3ROFCAOcnuDnF9gbLxOwbNEqSFCZsmVz6j+Mu2K62iQ8rKfBj0X24me9DxcgX4adO8Q9mkMlqERKYBv/EYzSwCBC/jH//EQHYzwHUznXLGdVaJ75Qf3OoeXDY2is5lPNkteec+s+IoJrI2Yftq6qv6JcDGVdecK+ycia9P/BReGqVQ5xS8sS+v9ohZQVKILI5TfLmlVRYWbXe1YK1vvQ4s8UjaJfK7q7igIzyCWpQwHC315HmDH+UYTcLxb5N1wthwqBN4PiUe1VKRiipfMMDUMWYu7RFC2IUtAsv9C3F1IQfdT9ctn0aZ1KHEm1ZKqnOVXuwnyjNo1hbRolx8WfeWU/krJT/1GpqPvj8dH46Pxcf8qnPJlVle7S1c4g4o1WGEZ4Ae9Nmqgc36B5X/dNUGd/EfD2trMlTQev1R9HAdTCCVGymKfzoXUhmdEO+kzbtyZUnQhl30WjTeMKoEZydQE98acm0U9BceG3WooUX8QkLnP831dsax3R745Ol388n/0u5Of/s/bH5++/dvBi8W5+veL37OT//jXPw7/8k0Kwk76Nt1qmEVLJlwl4AECXE+lVaA9jxwoezNxbZBgBFeEMW6M5T/3NXBGZOJFYPcVkjRXRNdlLwKfPHsxcA3fpzHUrThxo98LK26MHrw03/RgJnx5K26OT7p2nFaYqg/MTT/dMNNGhNG6Ke0VyzgtPG8dhZxNTEpoBGaXQxv66ObMsMyM/MjwOKa/3z7Wvtf/3G0SlQP0crkXgSnJam1kGVJscBxosAxZE25drTx8KWZ8DkVpjSSqFlusU8uZsRNFtUp9ms+MK7akRaFH9qZXtUa8GKSig0rBemAQnwbi76zoOtRMaKn0iCzZNJk5Gh6iMwqpNekb1OLr7OKtW7szp/ktju1ptCjWmNOcvITDQsQHFasRohJXpcP+al9uAPdYN5f/GlS20/7JW2fZ/r1mNQ5JXn94A7leUgAp+CvCFQpKu1Y4GglVeaBuYc6g6rtbPfSHfP3ycnyHZhWfr+lgJwb9M/aPDHTSmfxz5pINQ9HRax8MhsAEcYqkJ3UPGPfr87MuQ6OBo+V1b2qZKk6LHdsSAxg4m4v86gKzs8ygRdprPmyPr3q7Sd1fplxGmWWU/mbzdspmxFXF9LjrkEwGm3jlQE1GZOKZsf2d5xr+qbQrJP5pBb/IosCHkaXb3xq23O/X9MM+5uE85uE85uE85uE85uGsWctjHs59GN5jHs5jHk4K62MezmMezmMeTgvExzych8vDkWpOhXMjuhe9JtP9ZvMwtHhYfx0zoXi2QPSBVWuo11hZUbGyly4iJgwca5mt6LFx2o91wYoKypNSpaiY+04lxvXKidqcUIFhgBDY5ZopuuDLMG+8mLvG9+4yPC3eKdKpk/fnVsqKcTdOKa/VLXpAc96c5u6rLXc15UEtuU9D7tWPO9pxj268JSX1aMUPS00PoA23deHehdz7SKzXg7dZ4ppD09GC7wNnV/9dB+WddN/eRTxEQtKteu82CB9UEHvB72i994F+rb67zRpu03VJ20HoPCQp27tIPrxL7/FBZhdaHo8H3qSiuSmhbxOEd3ifTdI2DCK0Qwtlnh8kp9cFl8QB+MiTfQ/HccXzCZEzwwTRhq60j1jynY6xiblVSKMImExWHNVyqGxYyCktot53HuRI6NmWl25cXW1zL/ZFwFHKEV07NNdT6LMKCB6kHjZHXNYPtGkgVrxkUNhrrmjp5F5FNC95QfuDdwYXVPUi9wHSwPxqKgoV4jrl65qSXvNt8tDuhFGq5nXZ03jN/rylK6tAoNyJZFwpaVhmwKHMDb9h/R6tCL3/uaf1Ym9E9vYL+38rPNh/fUuwZ3v/1b949ollNXTY2RUKzqbQcYFhKok7o55BNNP3ruqg1upgysXBIPUAd9z17sEkA2GbdiXw/QgzlvCAGN/EheqwVowSfUkFBhTHnW9SD0pUxo5QMlVyqcGX55O/HEAel0s2JRV0hvGtGq3oKgb7cUAXunx8n1PXJGYfn2zsp4LWPOevdtPQpbm3jw+Pnu0fPt0/fvLh8MXp4dPTJyfjF0+f/MeG1/cH3/M+JlPX5mUA9KVU11zMrzDqqLdV910kkIOFLNkBLeL69reC7mAhARZv7Uyu+ETccFbtVNx4n3y4qbjRdB5j2OXZl3qe0YwX3FixoeI3EgiZKlmL3EoLnGGV/aY/LfFJpvCdbvfmcDHwmjHoLl1SsbLqR8aaIJEP8aRhTOwSCH5nVDzLEYHMtRAujIeKO6lBV1JAkqFLCGxE44lD2zjyBp9B01bFDIt7XjaBGkyPonTLKSO1yJkC1S+E46iRC8scxTGZI5IVHLq6+IesCOTj0eLY1zE5x8Ytblm0KCCg08gGZF5NRijMUZCuhMMLIIW6xIrzC2IUv+G0KFYjIiQpqTGQBwieeQMTUAUdF1chGj2e5JSOp+NsnE/uWrG7J2Rm8CBtGjZzVoQMZ4sWICHpy3+20p2joI1OvN7lHaL13Es9SZeO0qBaaRR9nUkhXAg8XAoYL6XYnKocA840dOsYRU9iYseUhxhIKwtjalYmVa6xK9uHlxeh3Qw2t/WQITgZ4/ZvhykuOLTBu/zbOxd3+a0OPQ/sUM30ODxWXg3ZZO05XCnwYtVdfCvOX2jfXxzYgQuUIzQztTdxYncxpkqyF0baw/ryMxdz4mcWLWC1r78MXzt1x9tje5JTfd3VDBmYbg0ew+7ao14mQ1Po4Y2QN6F7HMIaf6tF1uhQeNzde33DNCgU0kSDWTrBLdpHg3Zvw9+XOPyBBz5t1YAqH80tHy+pMDzzkf7e9fkJGweMmj7RVkGc1YV94IbbJfI/WGSJFSRjCvTPJuXJsyoVRp/RotCh7aDv/o+8yuUQa8OLgjAB3Y7hsYEodoukGQc9hVaVkpXi0JP4jszIsfBdiZoYwIQ95XBLwp2BieaeX5RTPq9lrYsV0q5rw8eL1Nuvg64GIVPgeR4R6ouTA5+voay5tLQyJuRvDY6xgnc6npEuO03RZZPugDQ/GbsPJrFzuy2bCHtpNJngeY3hpKjxTOylZMGajBHEib3/7A0GKf6ueH8yJDQjtWLGkBl79xGXcaRj8uhLvN9bngJyfnFzYj84v7h51mzwAPxbpLpuoRRLZdZC//lDZteCgcSwC0gcS8UJW
rPvJMujyQF6cbIZiH+FtA/okNKkd7q4R9T98JoYIqD75F800G6o4F24fIxNwO2A+hje8xje013VY3jPY3jPpkh8DO95DO95DO+5a3iPKy7RNXE0H24eYOErVbT1aRN/JxUE29h7s+nLhTE/NPbsFQVEUAwF7sy4yF05Ne+XhNIzaMnyd3wYz09v32jl6DxAO7kH67cUBcj48oW1EGjxgQUM1S3judewsP1SETp0rpAa/fv4eEmvmbZKVCW15tNWu3wj21iN0jlxB0VU3nAYtNCxyZsmFYPQGMWZyMCnoXXNNFo+7JiK5XYxrj0c6P/JgFakc3FavlMzz3176ZBLKPKGFtBSwMUcGlS6tnNtSJtwlCfP2VM2nbFDyp5lJ98/P86n7PvZ4dHzE3r07Mnz6fTF8cnz2UChontl2jWODFZQbXiGptl9t6oNvRixIORpvkm8cmdqTe5VzOvCAJCN5drBQUdYMBSHSlGFXGrgekuZDOfR3Sh80A7Nn0TVELdvlGi/d62hUoJEbi0S3xkG97meahNPhKJpAJYMcVZgpT4HriWNnGuj+LS2w/jCP0gvqgbbcFDfF1IbTUy6vOaIoC3T2/T8orFohlvagGfd1V2Dki1yRl7HOx9vASzLpVD7eA7Uq2ptWglX6G78QSryV0aN7g7DtcVazma0LgxUbqiCtyjgEbqlJuM6T8iMCEn8OKG33S5akA2ciG38eVEu4p1OAwzgfTYuTR57e/ZcPQmTtPebbJGxB8GOegu3hAFb+dEpxCmxjFo7FypOJTNMEkS2j0nkkTU7SQ996Xr2wQStfdk2MG1rGnoyPh5v2nDt31zIVot0YkllE/ppuCMUcZLXViSlLsKYGWxRnAosIVrMyrJ9xDOAJ1YtWMkULXZYP+a1n6MjpjTyBfmWz+AmZ5+4Np14QxLJK02HUXApaEIzJbUmioHX3dVgC2TN8wnJJfRW7a94/4KezJ4eHs6aGQNBg6OgJePGn20m4uIrm3iLQvt46mxxB0nl0vZQm3uHYj+HcxHdTYr9jF4N56X5R/ZqtO+FHXo0uvrGZ/BmYFGc7lH9x/Bm9EH/J3gz1oGxQ28GHq9/OG8Ggu3cA3EBpgEq+hJcGsMwd+B99Gs8+jW6q3r0azz6NTZF4qNf49Gv8ejX2Mavkeh8tSpShe/j+zfr1buP79/4G9Y1rseqplXBDLPfjlAH05lVg0cuehfqpVKzuKMeNtz75qESb7GTCsubhjS1gpquPojaLFJVrUcPeCeNi7njoqf+4Sgu9pUDIkvMbaHY/8UiLxkQYokpaFw0g0j7Qs4d1dnXuXa5YL/V2jRBir7EZYPwlmYWd3AJMejh9TA8Bd/HkuoA9CjsdFtCGjI3pHiOuzU4I9s4k6cnJ08O0Nj2z7//JTG+fW1kZYcf+LqfWiwyd0Up57OwV6ij89Kqbg6HEK1ZazRVj5DNNApwSJdPRpzUqhjbMScju+EQGWySLVIsk0IbVYMdTSriNwrJMj3xHRJtbcidtqAfz3jEd4XpSxi91R5uFAr678FC9gaO4SmmTZ5OfJOiikaqMIw8jJ3tlNOHWe0rZ6IZWm26XX3LPheYYWVJz55+z19cmLd0eoqrZgol9zEGvlghywb9KL2HESh0lYATBjpHONJOan4Djc9l6KLlbDpdtSigOl3RgD7baxUZTnIQhs0TP8+GxpEOvk9OnvQCfXLyZEjzNotd0cYFNJkaogx3bNsk4QGDzJNdQWYPGUzgmFUQegBW/AbzuNvwJ8OEtbRYTx+Zw7n+ZzjX7BNUJ47K58czQvg8HgPfdC0ZSEg7DlByKKUZrQVeD99RmHNam/BUugLTQgTa9ZuOXGVlGrhgCfhE6jvEEVqOtMSTS6bMLJmrr2+WEk/7UM0FReflDhu+2hMU+X9AYJoZl1My+XoSEamR1eBmft3LpD3wA2urNVO7zPX+6MZv0e2g3U3r1tgPzAFw/GFoYry0JHq9ZR6W3RSIX2i7cPrrwMCjKPVCF3F2QyOSM5I0ovPYd/8M3QzBBwaacWw5t59whgkwzY0EEy2oxu4GZkEFegTyUaOJCChVtPJSOPAHcC8SOWtgWmxYrcao+rZiNRiynXwUmTyTzzslbHrK3KQ+uC8h5OqXllejbodgBdO+3Z+B8/EwIT+0mLJEHlgnPS7s9e4rLxRy3ghXa+C0YnjbZnWPFOUzAJi8huZoiex4C+f5RqOWYUHB+vQ3lBdNHYAO4KykfHfasT14MIOX9wagWFC9MyHIhf55JrBIw+9i1oShAvAgVCaTYlVCjyj7SM8l9FGzWV1YLE+ANKDEinJ/QKBUCCaC9gpA+bRI2WGrJ1JGhb3Q3DU+gK62b+BB8fUjxN8EBs3RIAD36zg2ASSdbUMBcQBNW9JLZSaWMa2pWg3cPGlBrub+IfHn291COKS/i5poCKvquHo5vgSEvxXtuyu0jITh9EIuXVfgJZuGOAwIIIpKrWMtAKqs7FUHwJNaRF+g8coBfJPG4zTY61Vl9t7KP3hR0IOn40PyLb9YSMH+L3l58ZHg7+SXS3J0fHWErfx8abDvyFlVFexXNv2Zm4Nnh0/HR+Ojp+Tbn3/68PbNCJ/9kWXX8jsfHnRwdDw+JG/llBfs4Ojp66OTF+SSzqjiB88OT8ZHe9tcGXfhwjjZZriMPUnN/m/RJOFhtvTfujvZhiTx144P+5GIrWvGD4dLJI3tcekAeSz+/1j8/7H4/2Px/8fi/2vWslHx/6/JB1ZWUlEwOX2CiGtmyPPxIcmpXkwlVbn25Y7G/hVIaqm1IXMZfFqZHq9KcHVBVZIl14wYpo0muRTfGNJ0YQ9hUYya+E5BDNGCh8ykiprFqbuxouD2ks8VRSyAat0dtdWJaf3IrYd7R/86tFi08rirfuS/+eXVL6d9PRKdEfKAZfoAc28Ojp6/SKDthaCPVAb2vt0Wyt3uDrJLdgMRxF0BeMkUI4qVMoQfdRb0scqtSjTjBbM4PeBcHzj3Ic0yCaVxfJ2PrvA+rqgJcZdbLOjCvtYngsaCS890JReh6dUW0721r91lOvrbnaazr91hOpR7tp8vlp1CpIAXogbmkrpndVGM3zZL65eGBibt7OAGk/ZtX3dSR9e1KsJRA3/0RgfgslY8o4aSUuY11gOsNZipx3EcaBQK8YDnueunSbx3X+3bYZHpfRUE37/iXz1TvHQeDOgfKwW8F+LivW0IzB2FK2nkWn99lSqnCbM1vGR/NOJ8l9m2OWrMgtGg2xpiLYNHOJLJ5PQ3lnn5Fv+42gLpAStwEn3vS0CFD/tPIGBKtSg1lqQHJnltX2rpEFDeKs+5qx9mNQpIRHAJajBPyDkY6rrYyvq6S6oJgIZ5Uo6gkD4aktp7iQSzYFQZoBoXkLLXIrQBQnFPb4s0N6t7O92NXWLEanx+UsRLZ5KWAnG/SXTccRVG7szHH8DHGmYDq/BvckrOX1mmAj7C6arZ3Z715rVq+yL6tcgEBmwiHDVvtSJh31jtvSTJPdGSdXvjzjtzv3KzQO1KnimpWSZFrrtr60Sg3KUArwthqFXhI0MgLdSX
qJyYrJqMyMQU0ATRiosTDGiD3/WkB+ctdf4OUMX3iTdWTpndCLcnLB/7xJOSQ79YcBQ0ycjJcOElSy3nF7cl0GydKrMOyvMYqhQSf992nekTXk28hdpzeVcMURb26udVCD7xTTJcgV8YtWeF2lCTUKS9o7liecvRd7e4MJFbYUMquwmeA2Em8g0teO4ToEOSJFQ4jtw6noG7BUYcHPrrF1Je19WGTLsZg2zDtKOJWmlzQwz789D5QxNrQze1yAP1YFnXAdpR5nYrXD8P80SGIZawLz5jxAqv/gSM/yyW6uWGQtZ5Q3Qv7Z/erQ751TSnhvYLp2/dt7iiLHlVW2w3BQhonl/BA1d+SF95WapYVk3IGV4YV0pa+bApzB3ML+6b/U9b6FT4ikXHj1LOC4YrDhrHmZWCsYZHkceSa5CLmKHjABgsdVCMRoG59+G1AnM0h6+X0OQtr58m1PAIz2890wZmmNZct9liemZzZSyuIslv/WTuhY2tStFcjtnygpvV1Vq9J55w6K1NZ3WUtunGdah803kwSWSjOZJH2+M7fpDL7Bqo1DGEV/7vnsOF38H92k74d9/Zo60XUpkrFJcbjwkV2UIqP99+YAYDN1sAi9zqhyWtVDTKBTj3O3w8RlOEqv5XerdjYKqSzrtSxa2z2bfaHrstZu1Vb26b9O7TFXTKCt3YXn6SS3vTlRRiDzT75w4sic5P1uv95Ja73OKKIAhBA3YyiaPbn/CvnkHOrdIeUasTA+zrvrrOOCJQ+3kfeZL//l8/83U9ZUowzBl28/8cf9YDRfN9uGTTG7MZlMSzrz9NzUu3nqgE6O1OVSXzfnLbahMjDFRySIm2U9X31aSjmS5kTj6ev+q3DuiKZg+3qGbE7mQy7xz1e04mczaAQjwmtx/HzSZy576kVXcmiH3CGuYPNV00ZP+ctzDAu+IzDDuA1Nu4/f3nxXEdh9Eyu9ZPI93w8peXP18+tZzh02pD5TCMQbbRDeOJQsZ8yhiG2MTWulPL/vXmkhR0xRTBhihG8QrbCG2qM7mGBr2KUxuQW4ABgHjJErWOaUOnBdcLyIQMLSluOPVosw+JvJupb39CAbTGjh8NYmSC+nHr9b5lk3XqIlmnMnYWP6w2eoo0RXTZ7dm9YiJTK8zIhW3bkCxNMXwBDYUINpSREORtVouMKcNnUIrmSkhzBQaaqymb9cUjtxopJaC8pqrgTBvsiERNVKqz2cJvdDwhJg3CjN2osn7AIKdiK7jeUPOAUP3p53dBRa4X9Jrt7ATPuLDH14IaJosOZqEYzVfRAXXJvZ2Boz4vX8pB9VKxMVVsU/zw4WJLT5AboR/xQxZFO812h7PJSG9mMuzTpmH/zoRYqyI4R9wy4x2BbIar7p4EkqfLr7obsXVQXFzttw+iEJtCiZBinwparP7wmAo5NTXEp7fpSSpC53PF5mk/7Qc7trB19zu33vB9t2P7Px2aD0Tuk+BmXGkDVZqInHliA7+7syEvFTeGdcPzCVj/dSWF9uZmpyfhBjnI7RY1k7bYQWfADntI2EHncUxT5rNmMh+lZIe3IE1lDnlRUACRkkqxGf80gizcHn5gf7AIRSgACO7CVSuz34BnD+4GwVg72i3aJgmAwDsWki+Go/VN5kntykL64PQm401KDfoUy2snPh5PWV1RD9D4SAlkh5Rgjzy7cmzgTpSwlg60K1CI0pSrehJznh6O0eUUQwLFlyhA9E3mKfxqwWjeCma/M5pTqczz+BjhUPG0vQsW+T3MHa8BezajWwJM0m63Eu4PLV8DEtboaP/oOwet5IQZ382PeLskrZhRnN2wPBRjc4VKADTiYBv3AwcM6cG5dwye71jrCafVQxEq6kLWfme4uM9j2iWRUGNYWZkxeS1yn/MPefCBn3dGy3lOsgXLrpML40u+G74Uqnb6DM/KWJ85f/n2YkM9xr1JttFjzi9IFdrc3qrCOObTNX5uVZ/kHe4SnxG7OPI6W8j3bmDgfw8RpRBGJu8jhvmeVZYeUql/Q5n/oeMTvJkpi3fbnr+tbEvZ1jtup/Cs/S42plblmjvsv1dloVIPHtmH2PKWceblfZW8Bzau9nLt2MDa4r1baGVN5M+Xwst2oDWvwWijxdi/tGFVgz2oRmM5XKtb8ReCqP8XAAD//0HeMSs=" + return 
"eJzsvftzHDdyOP67/woUXfW1nCyHD1EPM3XfhCfJFsuSzIhSnEsupcXOYHdhzgBjAMPVOrn//VPoBjDAPMjlY2W7irqqszQ7AzQajUa/e5dcsPUxYbn+ihDDTcmOyasX518RUjCdK14bLsUx+f+/IoTYH8ics7LQ2VfE/e34K/hplwhasWOy82+GV0wbWtU78AMhZl2zY1JQw9yDkl2y8pjkUvkniv3acMWKY2JU4x+yz7SqLTw7h/sHT3f3n+wePv6w//x4/8nx46Ps+ZPH/+VnGADV/nlJDduz4JDVkgliloywSyYMkYovuKCGFdlX4e3vpSKlXOArmpgl14Rr+KoYG2hFNVkwwZQda0KoKMJwQhp8m+NritF4tvduxYhFMpeK0LJ0k2cpTg1d6FHUIXYv2HolVdHD3H//fadWsmhyi5u/70zI33eYuDz8+87/XIO7N1wbIud+YE0azQpipAWGMJovEdQOpCWdsfI6WOXsF5abLqj/y8TlMWmBnRBa1yXPKUI2l3J3RtU/rob6R7beu6Rlw0hNudIRvl9QQWYsrIIWBamYoYSLuVQVTGKfO/yT86VsygI2MZfCUC6IYNqwdn9xFTojJ2VJYE5NqGJEG2m3lWqPugiIV36x00LmF0xNLcWQ6cVzPXWo6+CzYlrTxfi5QYQa9rmHzp3XrCwl+Vmqsrhmq3uEz/y8jjgdBvAn+6b7OVrZqSDSLJmyCCY51WxwnHQPcilyaphoGQMhBZ/PmbJHy6F0teT5EhBr7GGaK8bKNdGMqnxJZyXLyOmcVE1peF22w7h5NWGfuTYT++3aT5/LasYFKwgXRhIpWGc5Hvd0wYRHq2OMJ9GjhZJNfUwOr8bthyXDgRy3DNTk2AoldCYbA//Ucm5WdqVMGG7WE8LnhIq1hZ5aMixLS3ATUjCDf5GKyJlm6tIuFDdPCkLJUto1S0UMvWCaVIzqRrEqfSHz1KgJF3nZFIz8lVEg6AW8WdE1oaWWRDXCfuamUjqDewBWlf2TX5deWvY1Y6SWdVNadkhW3CwtsJSX2rISE3ChGiG4WNhR7UMLTrQYZfkmbrhjs0ta18xumV0TkFVYEfBWu06ROaTPpTRCGhZvg1/qsSVUO4IlUQsTLBm4bykXetLCmFkisPx/zks2Y9RkcE5Ozt5OLEfHiyGMny7LbS+t6z27IJ6zLCKEmOMUkmlkMksqFozweXsSLHFwTbT9xiyVbBZL8mvDGjuDXmvDKk1KfsHIj3R+QSfkPSs4EkWtZM60jl4Mo+rGniZN3siFNlQvCa6JnAPis4StAIV7pLq73v49DOZPiiUKLkV4PsSpyMhVdcXZsX/+A4dOyCdLoYiY3tNsP9vfVfnhMJz2/7cB5DtLKldCaBkBihMUoHBHGhnSgl8yuHyocJ/j2+7nJSvreVPGtIFkrvzCiVlJ8r2jU8KFNlTk7jrqHDVtJ7fnLRlr1hjLFZqKCpBTLGMlmtVUIZlyTQRjhT2AwnHk3nTJgJ54c1nZyedKVgM4OZ0TIYk/aIAGPIH+kZwbJkjJ5oawqjbrbGjT51IOb7fdyW1s94d1vcF2++NuJyDa0LUmtFzZ/4R9sJe/RkEjkMFsHfFJe1NmKcpEYF1hB9r3VzCWm2bG2leAj/O5JZRkuHGiSQimovmSCzaMfjfE8B7wYhs78FHwXxtGeGFvyjlnCrfDHi/AwyM+h4sdbn/97cD+BEnMMnW8BOD7ld8NYPm8GFzyc3o0f7K/XwwvmdVLVjFFy09Di2efDRMFK+6GgFd+jrvgAFmSFXJVRcty7S4hTWiupLYaizZUWUHD8ocpkjovpuHWugo586/aCT1m8pL3RKoX8bPNZKoTN5DlEAWbgyxH8VhxwQ2nRgIyKBHMrKS6sEKXYKBVINtEWUmxBVUF3JL2tpRCT6I38Sqd8YIrfEBLMi/liiiWW4UI5YEPL87ccMi5Wsh64NgH9vUIGLgFNBMFvn7+t3ekpvkFM4/0tzg+CtW1kkbmsuxNgrqn3bvOdApUamaVES+OeGQYRYWmAEBGzmXFgjRhZXf7pmGqIjteSZZqx15Ois2ZSqYXneVolHLcz04uxD2csSAIRvIuTEssKGLhd7AdPIYZdU1HLH5oy6ka3cDyW6mTCwvSL41AFIMQ6sRKZ7ogA+O0iLTSWDuaJRfckl04wEFBT06TG2/PT6RYrZgV3OD6xJvcapyaVVQYnoMWwD4bd+mzz3jyJu5u5Tpc+kaSS27XyH9jrc5g18gU6BGam4Y67J/OyVo2Kow+p2WpEZUgbRi2kGo9sS/5e0cbXpaECStOO3KUjcrxbiqYNpYCLB4tkua8LO1Zq2sla8WpYeX6liIjLQrFtN4WfwSyRt3BEZSb0F1wgW1UM75oZKPLNRKvM+vwskzG07JiYNciJdfG7tnp2YRQUsjKboJUhJJG8M9EW73eZIT8rcUx3sfpeEY6BUfRlYfNE/00cw+miMNh8QIMS630UDRoLEHVeprxemrBmmYI4tSqjTUThZMFgdCSIe1dAYpNNnKT1xve5MmLV+zR6VlYuOOOuFUDy3XGGwuiVEHbJ6dnl0f2wenZ5dN2g0fgr6UyG66glGKx2RrOpDJXQh8MOTTfhiD09uTFRkj0YCAxbAMSxwJxgs7sX5O3zCie6x48s7VhA0xgk10JAsfB86PNQPyrnQz1aauQxNeNkXgjRVpwn4DgGrgztIcbUhbOthG4PVAXLBbznaT1Q/KwI2pdA80PTAYDFrUqiFLr2HxFia5Zzuc8J6VEky1RrPTsyN5xl62Yh3+ksnCm5hCm+KW9de16gcnGHDBGb3zRENLxRaTI8AAlkw9vXRidyU+15B2Ar8APIW+kWHDTFHhzltTAP1LlLRDBN/9Ldkopdo7J7rPH2dODo+eP9ydkp6Rm55gcPcme7D/57uA5+cc3Q+uxtzsXTJhPHXvGdavqn+dr1hTbNcKsI0t6J5VZkpOKKZ7TYbAbYdR660C/wHlg1hFYX1BBi0EgFVtwKbYO43uY5ioQ/71hM5YP4pGbL4BEbq7E4FspjGK0vGqjuZafcll8kc0+Pf+J2LnGNvzkis3+EnC6Db8WzN1/fzEE6dh2DwjLtwbxo2Zq18vF0ZuoSXsmOiHO4ITakJyThaKiKamyFOPcLIrhtZB91d8ulFaDkQ+5C1d4meRMGKacljsvpVRENNWMKfCFgHHD65O6MzSCWJJ6udbc/sU7UXJPyroHzjsJ5jn7erlGtxQXhDZGVnBzLZj06x7ZsZnURordIv+qY+iQTdG1c7SPNjNzfI/3bXSNogQgG/CDcDFXVBvV5KaJnSUtYuw+9Ayw1/pH5k5YQ7Ogjg3IVJBXLw7RXWNvuTkz+ZJp3Du4s3k0PXqhWpjtRZ+6EhP/F9fBzJgCEQZUjXD+K8UqaYJZksjGaF6waK5h6Chx7ph4yNhjAx876ks9nzhsOxR4odz0sSPITZAibjMdOSagWslLXjC1kX4cqJHlh3cT4pMLH1bsAQnewtjVzfLDCVnkbEKkShkNX3BDS5kz2tUFggHgkvKSznhpr7PfpBiw1F
+11EbvMqrN7kF+txWfRGCQ30AH9h4OIEmg9XYzRxaDN8lGKxiDsb+yzRbgbpbbQO1t/tkd7dQBdL57cPj46MnTZ8+/26ezvGDz/c0WceogIacvPfnBEhK/wzj8w369+7EkBdCi62oT4Pyvw06o22DXHGYVK3hTbQb4W8+dIm/VBnDTHOS3e6OJp0+fPnv27Pnz5999991mgH9ouTjCAqEBakEF/825I4sQQ+LcH+s2cCS9qK0QwCHEgVA0HO0aJqgwhIlLrqSohi1O7YV48vN5AIQXE/KDlIuS4X1Ofnr/AzktMBIDw1/AM5UM1XpoonliZY5yETi9lxY6jzeTGMJXqYXcmbF74U6RJd4r711wCNqEnTvDmYblPB4G7Kaa+SmXrKyt2IxiC96YM6ojoglzaK/nry2jMrzVNm5oTHZfb4sFvMfhSUUFXdgbHXhsWMagFwzju0b41jZ9ogEswruG4zB/RRfbZZqxHAGzBRMCgraimswaXpogHI0AaehiWzC2h8VBSMfuyW1iqoWi1bZ7ACRRlZuAkERYkhCs+Ok29x8gxwcnki7/ilxEKQd72fthMx4WfbeBCzH2UIGeikbaPRebesWgN3AeItdr457JH9ndlfjsHnxef3ifV7Rff1bH1/gSvrz363pYtucCi7nMn80PFrMN710CvvcHdoZdAXMP3geP2INHrL+qB4/Yg0dsUyQ+eMQePGIPHrHbesRYEHqSHFOysV74lhm6G9+M4Xo10g72O6WuDCauXkNZr16c+3lxB12gooTVaWJkRqYs15l7aYp5IyrNGLWXatVogwHesE3lSHiq/fOz1Z5+bZhaQ7AtRngHhYKLgudMk91d50ao6NoDZBGsS75YmnKdHp6QqxetCMaAVSGYpZXbuDBsoVwwLC1+sWCjxJZqiPmSVTTgxt2zo0sCQ3GjMFvQfcM1OYAkoBkz9JAM2uaiF9pBA6EqJTvG2FfRo42z/lqLaA5JNS4gGMcHdYWKNbngosgso7ErrTA4HV8wy8jziflvdmtKhn5Nu4k+5Q8ivDHnsps4x41m5bx1Y1qx046fYHNzt+SXyuaYuzy/PqxjqbHXARSlyF4DDex2mxI6OHfncrw3TODcdnTP1dHc3MdEINfLXkbFq8vbJKkivQz5DXw0+bDroJQLgs4FxfOE6jJyAr+mWRpe8fE0aRcY5YiC0WmJq6Zt4mdG3rQJysD1fM4q5Cvwitlb2HtA7VM7RPt1SHWV8zjV2Q9CfcokgYwXH+7gQhjaPBLUesmMYdKIV0aptxFaxS5WSydoJRtIQ5kxs2LMzuHj00Xh4hOYchO4dA5Me81Lqe1KTjyqr0ertxpJxazQAHpICWNhJgD8M0kOtkAMI3Q44zbBa0wCLWorVkm1Jpb9QY6BG6joZCpfNqVgCh3xvM1Zdq/pnAq7UMhbvt1Fv1XWdfrSbn2wUwf+e4vsMXsj9CG9HzOxPed2/ORmHUsMW/BL8Jt2D/3KnkvvVE6qJ/gRk7H81TMBY7odwJ2eSHzz2jReZzFsrSM2GdTypym8MZ2QqTbUMPsXWlJVTTPyM1X2AECy97yB8Kggnci5lVYmZJWKHnVJwYjk4l2s8OwKYNA8Z7WBjFgX+oK3k5dwJqQuGdXAMJMhwXmQ06YrLAdCALhHLhiXq7OVSwb5hJthbPuDyLDki6XLfRq+AUZ27jSlA66REUGild32JRVuDzNMRptOvDNAM6FdNlKrjNCUrBz4LZxBlqU+GW0DMkg3jN0DGSQjNpoNkMEQLTRW1wQHM/DYYarAlW2DJiBdGW+mnNYGOK/LRL6SSQTd0+UftvTBRUoMgQDag7+kqQXSUYPf2ml0vcCBB16/S4vCnnV3Ye/Chc2KabqV0zkv2W6umL0+p+jmwrowXLf5rv7+dCvldq4KFO7B8wp7VFOtLV53MWVveKNkY3K5PaexXY2b4jpWfhr9HO0WFW67JxEJ6zQ6s50hNabYY+nTR9v7H192O6WbPAdfHpS3mVNeNoqljDkZc5xJ3+REpkOOMukNT6Rbw/AGb6u0wHsGEiAK3g4rzYAiYv+c4YropYR4qBCY0haUsgQLZqQxFUoWTbn1ihg4i7NVbVQXAhPTY2aSfBGNqoONCnP4pQqVTQaPcLXWv5bDyLCgabapp/TW2HDTjJkzpLBEjRbGqXt3Sh5ZdqaZIXtOytbMfGuxkq7e6gGpQaWZ2a+scI7oAk6cnPIYzSH72FlVOvYeV9mKixYIrJIDpqjwyO23JWCEOuuazRMJaOSEaXbJFDebSkBjHsadZzub7dG5m69zpXkwOsLNz0tn9B0OOwxfOVGhYuAiFJbDRaGKQQsMRbPs/nyjSVMTIztcN7mfLEes6AUjoFO56bhjv7kUmmsDWiXa+QZNaOGywjz/8taU/zX5aInINAIywp1N04WLc6xvpJdyJTAuMDflmqyZseT6f6SQWClPqotkSCs/WN6uyYolgSlfk1NN/r+vDw6P/sXHJabp9nar/g+q7kl1YQGBEwWWjNZGlgyIwaQ8v9CDVLpzzmpy8B3Zf358+PT4YB/DaF+8+v54H+E4Z3ljtxv/leyb3TkrhaBop/CNg8x9eLC/P/jNSqrKX0Dzxooq2si6ZoX/DP+rVf6Xg/3M/u+gM0KhzV8Os4PsMDvUtfnLweHjww0PAiHv6QrsZaF6m5yD70AF8v/oom8LVkmhjaIGDUFo5+VmSKtwbB1vJ0cVXBTsM0NbdiHzT1FuQcG13f4CORYV9vUZ64yIZeBYgRVKeKiopCwzYsFvPv2E9plpvL0w9zGZ0zIR2lsw4t96h2ZJ9fJO4l1LXW3M/NDfTv764uXGO/ea6iV5VDO1pLWGimZQ42vOxYKpWnFhvrWbqejK7YORFl0gQ3UYDtl4c8MF2qhuVMH9xBq9dAMnPNgyCEGF1CyXohhyD5zOHbmCigA0hv9mogASuxCWJwG3Qt2gjSzreiY8y85Z4NkAiUDaxRnaCOa+vMgrtnGSy600gnC02kVElfiSqqXfaBJqtLYV6JzBLr11HNip5l8qRos1ecSyRWZ1KNqUhpyvtSWSMLD+Fu+yZDxZu0I6ECy/4npIrj1p5fowP84OnOGYUHvMpQDz5elLB8fOq0bJmu2dVNowVdBq59tUJaSzmWKXaE/1n5x/2PkWTLSCvH59XFXt1cxp6d/a3X9yvL+/062gFEw1qGRuSPVFXOzyyi11yjCO3subG6xE614ek6jbTbeSONeGi9xZsP8t+s2Vi4ke+cl7EolTwuH2dC9nvpwogKqxNl1LFZ5DD8tNrgZQBxhkPyUXKGl2Fs6xtG5cDy8Zc7aOyqAphrQOrqaclhmZtuucomchrtAZfku35rNRNDf+eokhnHT2LQAblsB9KeB0f1yltRyjZ+vaylESHA72BkajjFWA0MM3sDk9ntW+MgBv7NGwE7TcsQt5nyivoTVfog7wl26+xX/A/SReRcu12pp3fZ3AstkbsNCbHjZk49ceNWdysoxjEEk0N/zSSv8WT3OutPGVTccWxm5k87/psuwtde2iYKp4SWEZyYh2SSW9f
kWK64tPusMCr2KM81LSDT2077m+IDA2FjvlsqehOd6tnWBOtCzB3OPr4Pk/HzXDkllYi+wbHbQhJxLY03btEj8JqaobbOAN1voObJX8N1bAfNcsexLcZSVI7fuWhxzs719Rj7SiXGCoD9YYheJgVh+tMFqfCvAjulptaPzTmi86t0ELnIYy6DDMimKtGs0Yoc7sCktB3DrllJalr0A34OCe88DPO85s5+7+vn1hDI8nMErXY0qcaST1YYHTWZOZFfE8K3SOXPscgm28WxLsGwB5BmD4muD+kqNay5y3tZBBb/TVApPSdoi0PWcz8T5UIOIJMUupmauMjtZqmOzUy+PkrRTcSLge/vv707f/46uogz3MZaRDQUEIH0FTr7en9nNq6HzO8LKwr3fXYKIi+s7ocyOPbBtAbloFauzADEvCyTafUQuUdDn7ZXpY2wL6asHMp/ua8wMMB0sAsUOvq5KLCz04N0yQxJjdYeaYOcBuhtF7RxwOeMjGKeWKMKrXFkeGAanM1o7Y/BCR9SNop7VT0roIje3fd1gPrAGcyWDinJCCKzhrDqXfDqK0YEkRhzvM/xJGGklyvZKkuIhjgO4AwqkdqDVh+YAf5Fgi/N3xmSFQmii24Z5oy8qj4D2w+tXH05ffIidxt2kUqfXoHH5skUXkSnRKqAVD4ypOLL4r1cBo34AJXPVyJ0Pax/2g5kzxiqo18jbAyQ+dZQ/PnqRk3Nv8cSWC0bmr25NnOPz7T4/2hwF6a2k23nUuiMwNLTu22EHQNP9tU9ASI1GfBuxIdmpIn7IsxNkWpRVpaFF4NWZqR5sSnsos4CSeDrOYKkkovxrIRB5PgHxjJWUIpgIkuUgJEKIrWdgTVAzOnm9j9ooZijHl4LkuBoStmGB9jlT0aPNoQiTUKJqwYk4WbCNh4R3tREplWWDJLqnoRQYnkVT3EPV1Pxa38aBVXLsvnw5se68uqbFS5u+QYR47HwG0gX2PGgK4bX/dPtm0KLcvOpPI2K6uMsllVTcGoxpd1RaIGoeIvqiJyIDtMu4i0kqp2DNERCGKaasQrMkhrg9htCsFvLYxi0uqihVVbEIuuTINLX3NFD0hL6GwQ1TEAtWdH5sZU4IZMKYW7LZ54nZVw8Rwdy/0azd2XAxmyHxjooLw3mqw8v7OqYdware0sktXzDQKK3NtWGNmWyt8t9HqIF3T2fhgXdGaorV8hNR21Etd+k1Tdjzivza0BC7uk+LtKD7o1wLjgp3aGCMrrWA4krZnu1M2i+W8CD2PUEk20n4zlp++zaBWPM9DFr4THQjVe/JczwksfzMBA4Jz5gX+bq8ALhbzJi0zwAVaYDaqx3OcJH003js5hW4NsIVZH0n3ncQPHIPXPvX8y+a8v3bH65rZt937ZOR4fS+Vq4zkC8e5vhrOIpKUzbNDQQOjaShtNU3Nc6dzcllNfL2dKFMusN9JbPeP6jBFRp1kxJYINyC8EHep8iU3DAot3hqprcP38/Onn54ebejU/almipq2lVMCzECiu4xlXHeZt2OcwxjRGzdLereH76fzbiuz4bBg2QE83lnFGvDuHyejG1l/cjjteuUt+mqwSqWf7IaeYZ3HvRZHu8B6P8VN3chtcue9JJcMvoXk096++4nJI+jhlTNhpJ6QZtYI00zIiotCrrr27bYeFVUrLraYSduS91uaWyL5z507LBYV+gFo57TinUv4rvAWbMapuAm05w4MtxXQ3LNYUjMhONYE2hTOdBFvy8Bi+smnd1/NwX52cJg93VX54V02wOdTghCv6Ipoo6CS5MAyLqzkW97rKo6yo2x/9+DgcNflC9xlLQjfBkt6KBYysLsPxUIeioWksD4UC3koFvJQLKQD4kOxkPsrFrI0pmOFfv3hw5l7ctvi+XaIENNy20Kz2FMvq5hZyq2Zll8bU/upCE41ki6CDg80FEHs2ozFYRZGklKumIJwLKsn+/ofGTln6YnYeRNefEFrbuwIsHM73gm5c+rTD6xo9erF+Q4hGrPZB6PmF8xMSA353XUzktDo8TmTxTpz3pFtYfWDs+ABdQX0wsxD4GP79JVU5Uiitocd+iKqDUv13yolDMdvM9qAkv30Q7DbFerjvb1ZKReZe5rlstobW4mupdAs04aaRne5+XWr2TyQ2xE2zkZwth5DD6s42j+6Bt7fg2wc8Lenm9GKQ/fIPNwcI9VvDlLAxqtShuM5XJ3yHijigzS07LhxncTsT+gji2rQCpaMFkylJo52WUePn23AZLa3lPOrFjFKLs+fj0Ltifz3Qb6j83vAfnxYvzj6rzuuCf5blXeRih9vwoOrxQ103NAky11GBWduKXYAlvpYu7s1/41ctJKoj1IfSyXHItNJRv7PJ+/fTSdk+ur9e/uf03ff/zQdRPOr9++Hl3bn5MPxLD0QaMGJ9XZtFxabkG6U/DWKxs5FgQG1YPv2QcQWnz6LjnbDsOFaid5IhpuxOVZLKLlBv7khDSREhEIXNVWDddFO0b+paKiyRqZuCldd2xFq7AmFNsQ+TaBO4+xJTB5upLhwQKdugFv8pLfAjnMHXbFLeslCTpG2NIahMbkvF1fXJWcFeoqYyCWW81ZEsFWq1HHBNLSGukTZNy8ZFZBLm4I+Fg1909REoqXLOfyml5toJW1w+zpvCMroG6UnJqzIRQmn7Ohd8nDzqBwfctzvm57LqmqEwzkGtspLpjxDc9EWKg1adrEWru23++lWwRx+2JA50Y069hbQWzLQrcfXLPgls3eP83pBAT/p1SPdqukeSUMM7AeQFH7mcz68iG25dE9Rv/vp/BTC+ko82KvY1uAIjryha6YywuvLo4n9/6f2/zXLJ6Tm1YQwk/8h9dSr1FS7lmF8cyroJ7SfbIt2CDk9eXdCzlx7f/IOZiOPvAK3Wq0yC0Ym1WIP0y6gUNte7b7YRfj6D7LPS1OVHW8gIeeGioKqAtDuC6n4b+Egc01oyRcC8+7x9L1j5vtSriwv7Iyn4bm3skDWH7KMxiWADa1vcB+ejhC9okLfoIPBzdpmQPEKHU5ltOMuo1xow2hbXYWRH3H82PqWDBngJaU9K+RRU9QTYvIaz8suz6saDkr27R/yqFx5VkxeD+8S3NE9N9G9HpUTRDkyWvSJRbM6yvV5N2rGjaKKl2uXrIQVddKdWnKx0ChWVDxX0ifK4NbTUss2DzN+WV+sazYhPP81TTCe05zNpLyYELPixmCcV8xJvYVUc9M44aat13rJRNGBsE3eCVmzLJeFFTyc2zmkc6IAsVfYG+T0DGPjdQqeJUoNETIrrnxG9R/TrngVDVJeDdOg52Jb0ZOehSvQT4PuHcI+Z2AZmpAS+MYvNLcEELiAf/3Ph+hghO9huuCKba0S3Us/uNc5vGxoFJ3PfbJZ8sl7ZsVXTGBtxfTjzlX1T4SLmWx6V9g/EdmY4R+4MEylyin+YFna4A+NgKISfRih/HZF6zoq3Oxqx1rZehda5JGqTeRzVXcnQXgGsSxlOFjoy/MAO843moDj3SLvkrPVWCHwYUg8qqUiNVO8Yoapccg63CWCsgtZApL9
L8TdhRR0P9WwfBZtWo8S51KtqCpY8Wk7QZ5Ru6aQFu3yw6KfnNJfK/l52Mh08N1hdpAdZIfDq3DKl1l/2l66wglUrMEKywA/6LVRA53TMyz/664J6uQ/GtbWZa6k9fil6mMWTCGUGCnLXboQUhueE+2kz7hxZ0rRpVwNWTTeMKoEZiRTE9wbC26WzQwcG3aroUT9XkDmLi92dc3ywR355uB4+dM/63dHr//57Q9P3v5t7/nyVP3n2a/50X/9+2/7f/kmBWErfZuuNcyiJROuEvAAAa5n0irQnkeOlL2ZujZIMIIrwhg3xvLPfQ2cCZl6Edj9hCTNFdFNNYjAx0+fj1zDd2kMdS1O3Oh3woobYwAv7S8DmAk/Xoubw6O+HacTpuoDc9OnG2baiDBaP6W9Zjmnpeetk5CziUkJrcDscmhDH92CGZabiR8ZXsf09+vH2vX6n7tNonKAXi73IjAleaONrEKKDY4DDZYha8Ktq5OHL8WcL6AorZFENeIG69RybuxEUa1Sn+Yz54qtaFnqib3pVaMRLwapaK9WsB4YxKeB+Dsrug41E1oqPSErNktmjoaH6IxSak2GBrX4Ojl769buzGl+i2N7Gi3LK8xpTl7CYSHig4r1BFGJq9Jhf7UvN4B7rNvL/wpUdtP+yVtn2f61YQ0OSV59eAO5XlIAKfgrwhUKSrtWOBoJVXmgbmHBoOq7Wz30h3z14jy7RbOKL9d0sBeD/gX7RwY66U3+JXPJxqHo6bX3BkNggjhF0pN6AIy79fm5KkOjhaPjdW9rmSpOyy3bEgMYOJuL/OoDs7XMoGXaaz5sj696u0ndX6ZcRplllP5m83bKdsR1zXTWd0gmg029cqCmEzL1zNj+nRca/lNrV0j88xr+IssSX0aWbv/WsuVhv6Yf9iEP5yEP5yEP5yEP5yEP54q1POTh3IXhPeThPOThpLA+5OE85OE85OF0QHzIw7m/PBypFlQ4N6L70Gsy/V82D0OLh/XXMROK50tEH1i1xnqNVTUVa3vpImLCwLGW2Ykey9J+rEtW1lCelCpFxcJ3KjGuV07U5oQKDAOEwC7XTNEFX4Z548XcNr53m+Fp8U6RXp2837dSVoy7LKW8TrfoEc15c5q7q7bc15RHteQhDXlQP+5pxwO68Q0paUArvl9qugdtuKsLDy7kzkfiaj34Jku84tD0tOC7wNnXf6+C8la67+Ai7iMh6Vq99yYIH1UQB8Hvab13gf5Kffcma7hO1yVdB6HzkKRs7yx5eJve46PMLrQ8zka+pKK9KaFvE4R3eJ9N0jYMIrRDC2Ve7CWn1wWXxAH4yJN9D8es5sWUyLlhgmhD19pHLPlOx9jE3CqkUQRMLmuOajlUNizljJZR7zsPciT03JSXblxdbXMv9lnAUcoRXTs011PoiwoIHqQBNkdc1g+0aSBWvGRQ2GuhaOXkXkU0r3hJh4N3RhdUDyL3HtLA/GpqChXieuXr2pJei5vkod0Ko1Qtmmqg8Zr985aurQKBcieSca2kYbkBhzI3/JINe7Qi9P73jtbLnQnZ2S3t/1vhwf7XtwR7uvM/w4tnn1neQIedbaHgZAYdFximkrgz6hlEO/3gqvYarfZmXOyNUg9wx23vHkwyErZpVwK/TzBjCQ+I8U1cqA5rxSjRF1RgQHHc+Sb1oERl7AglMyVXGnx5PvnLAeRxuWIzUkNnGN+q0YquYrQfB3ShK7K7nLo2MfvwaGM/FbTmOX25nYYu7b19uH/wdHf/ye7h4w/7z4/3nxw/PsqeP3n8Xxte3x98z/uYTF2blxHQV1JdcLH4hFFHg626byOB7C1lxfZoGde3vxZ0BwsJsHhrZ3LFJ+KGs2qn4sb75OGm4kbbeYxhl2df6nlOc15yY8WGml9KIGSqZCMKKy1whlX22/60xCeZwm+625vDxcBrxqC7dEXF2qofOWuDRD7Ek4YxsUsg+J1R8awmBDLXQrgwHirupAZdSwFJhi4hsBWNpw5tWeQNPoGmrYoZFve8bAM1mJ5E6ZYzRhpRMAWqXwjHURMXljmJYzInJC85dHXxL1kRyMejxbGvGTnFxi1uWbQsIaDTyBZkXk8nKMxRkK6EwwsghbrEitMzYhS/5LQs1xMiJKmoMZAHCJ55AxNQBR0X1yEaPZ7kmGazLM+K6W0rdg+EzIwepE3DZk7KkOFs0QIkJH35z066cxS00YvXO79FtJ77aCDp0lEaVCuNoq9zKYQLgYdLAeOlFFtQVWDAmYZuHZPoTUzsmPEQA2llYUzNyqUqNHZl+/DiLLSbwea2HjIEJ2fc/tthigsObfDO//bOxV0+0qHngR2qnR6Hx8qrIZusO4crBV6u+4vvxPkL7fuLAztwgXKE5qbxJk7sLsZURXbCSDtYX37uYk78zKIDrPb1l+Fnp+54e+xAcqqvu5ojA9OdwWPYXXvU82RoCj28EfI2dI9DWOMvjchbHQqPu/tuaJgWhUKaaDBLJ7hFu2jQHmz4+wKH3/PAp60aUOWjheXjFRWG5z7S37s+P2PjgEnbJ9oqiPOmtC9ccrtE/huLLLGC5EyB/tmmPHlWpcLoc1qWOrQd9N3/kVe5HGJteFkSJqDbMbw2EsVukTTnoKfQulayVhx6Et+SGTkWvi1REwOYsKccbkm4MzDR3POLasYXjWx0uUbadW34eJl6+3XQ1SBkCjzPE0J9cXLg8w2UNZeWVjJC/tbiGCt4p+MZ6bLTFF216Q5I89PMPZjGzu2ubCLspdFmghcNhpOixjO1l5IFa5ohiFN7/9kbDFL8XfH+ZEhoRmrFjDEz9vYjLuNIx+TVF3i/dzwF5PTs8sg+OD27fNpu8Aj8N0h1vYFSLJW5EvovHzJ7JRhIDNuAxLFUnKAz+1ayPNocoOdHm4H4V0j7gA4pbXqni3tE3Q+viTECukv+RQvthgremcvH2ATcHqgP4T0P4T39VT2E9zyE92yKxIfwnofwnofwntuG97jiEn0TR/tw8wALX6miq0+b+DepINjG3pttXy6M+aGxZ68sIYJiLHBnzkXhyql5vySUnkFLlr/jw3h+evtFJ0fnHtrJ3Vu/pShAxpcvbIRAiw8sYKxuGS+8hoXtl8rQoXON1Oi/x9cresG0VaJqqTWfddrlG9nFapTOiTsoovKG46CFjk3eNKkYhMYozkQOPg2tG6bR8mHHVKywi3Ht4UD/Twa0Ip2L0/Kdmnnh20uHXEJRtLSAlgIuFtCg0rWd60LahqM8fsaesNmc7VP2ND/67tlhMWPfzfcPnh3Rg6ePn81mzw+Pns1HChXdKdOudWSwkmrDczTN7rpVbejFiAUhT/Nt4pU7U1fkXsW8LgwA2ViuHRx0hAVDcagUVcqVBq63kslwHt2twgft0PxJVC1x+0aJ9nfXGiolSOTWIvGdYXCf66k29UQo2gZgyRAnJVbqc+Ba0ii4NorPGjuML/yD9KIasA0H9X0ptdHEpMtrjwjaMr1Nzy8ai2a4pY141l3dNSjZIufkVbzz8RbAslwKtY/nQL2q0aaTcIXuxu+lIn9l1Oj+MFxbrBVsTpvSQOW
GOniLAh6hW2oyrvOEzImQxI8TetttowXZyIm4iT8vykW81WmAAbzPxqXJY2/PgasnYZL2fpMdMvYg2FGv4ZYwYCc/OoU4JZZJZ+dCxalkhmmCyO4xiTyyZivpoS9czz6YoLMvNw1MuzENPc4Os00brv2HC9nqkE4sqWxCPy13hCJO8sKKpNRFGDODLYpTgSVEi1lZdoh4RvDE6iWrmKLlFuvHvPJz9MSUVr4gj/gcbnL2mWvTizckkbzSdhgFl4ImNFdSa6IYeN1dDbZA1ryYkkJCb9XhivfP6dH8yf7+vJ0xEDQ4CjoybvxsMxEXP9nEWxTax1Nni9tLKpd2h9rcOxT7OZyL6HZS7Bf0ajgvzZ/Zq9G9F7bo0ejrG1/Am4FFcfpH9c/hzRiC/nfwZlwFxha9GXi8/nTeDATbuQfiAkwjVPRHcGmMw9yD98Gv8eDX6K/qwa/x4NfYFIkPfo0Hv8aDX+Mmfo1E52tUmSp8H9+/uVq9+/j+jb9hXeN6rGpal8ww++sEdTCdWzV44qJ3oV4qNctb6mHjvW/uK/EWO6mwom1I0yio6eqDqM0yVdUG9IB30riYOy4G6h9O4mJfBSCywtwWiv1fLPKSASGWmILGRXOItC/lwlGd/Zxrlwv2S6NNG6ToS1y2CO9oZnEHlxCDHj4Pw1PwfayoDkBPwk53JaQxc0OK57hbgzOyZbk8Pjp6vIfGtn/99S+J8e1rI2s7/MjPw9RikbktSjmdh71CHZ1XVnVzOIRozUajqXqCbKZVgEO6fDLitFFlZsecTuyGQ2SwSbZIsVwKbVQDdjSpiN8oJMv0xPdItLMht9qCYTzjEd8Wps9h9E57uEko6L8DC9kZOYbHmDZ5PPVNimoaqcIw8jh2bqac3s9qXzoTzdhq0+0aWvapwAwrS3r29Hv+4sK8pdNTXDVTKLmPMfDlGlk26EfpPYxAoasEnDDQOcKRdlLzG2h8IUMXLWfT6atFAdXpikb02UGryHiSgzBskfh5NjSO9PB9dPR4EOijo8djmrdZbos2zqDJ1BhluGPbJQkPGGSebAsye8hgAsesgtADsOIvmMfdhT8ZJqylw3qGyBzO9b/CuWafoTpxVD4/nhHC5/EY+KZryUBC2nGAkkMpzWgt8Hn4jcKcs8aEt9IVmA4i0K7fduSqatPCBUvAN1LfIY7QcaQlnlwyY2bFXH19s5J42sdqLii6qLbY8NWeoMj/AwLT3LickunX04hIjaxHN/PrQSbtgR9ZW6OZ2mau90c3foduR+1uWnfGvmcOgOOPQxPjpSPR6xvmYdlNgfiFrgtnuA4MvIpSL3QRZ5c0IjkjSSs6Z777Z+hmCD4w0Ixjy7l9whkmwLQ3Eky0pBq7G5glFegRKCatJiKgVNHaS+HAH8C9SOS8hWm5YbUao5rritVgyHbyKDJ5Js97JWwGytykPrg/QsjVTx2vRtMNwQqmfbs/I+fjfkJ+aDljiTxwlfS4tNe7r7xQykUrXF0BpxXDuzarO6QonwDA5BU0R0tkx2s4zzcatQwLCtanv6S8bOsA9ABnFeXb047twYMZvLw3AsWS6q0JQS70zzOBZRp+F7MmDBWAF6EymRTrCnpE2VcGLqGPms2b0mJ5CqQBJVaU+wcESoVgImivAJRPy5Qddnoi5VTYC81d4yPo6voG7hVfP0D8TWDQHA0CcL9msQkg6WwbCogDaNqSXiozsZxpTdV65OZJC3K19w+Jn9/sFsIh/V3URkNYVcfVy/ElIPytaL9do2UkDKeXcuW6Aq/YLMRhQABRVGodawFQZWWvJgCe1CL6AxqvHMCXaTxOi71BVWbnrfyNlyXde5Ltk0f8bCkF+xfy4uwjwb+Tn87JweGnA2zl50uDfUtO6rpkP7PZj9zsPd1/kh1kB0/Iox9ff3j7ZoLv/sDyC/mtDw/aOzjM9slbOeMl2zt48urg6Dk5p3Oq+N7T/aPsYOcmV8ZtuDBOthkuY09Su/83aJJwP1v6H/2d7EKS+Guz/WEkYuua7P5wiaRxc1w6QB6K/z8U/38o/v9Q/P+h+P8Va9mo+P/X5AOraqkomJw+Q8Q1M+RZtk8KqpczSVWhfbmjzH8CSS2NNmQhg08r19m6AlcXVCVZcc2IYdpoUkjxjSFtF/YQFsWoie8UxBAtechMqqlZHrsbKwpur/hCUcQCqNb9UTudmK4eufPy4OhfhxaLVh531Y/8Lz+9/Ol4qEeiM0LusVzvYe7N3sGz5wm0gxAMkcrI3nfbQrnb3UF2zi4hgrgvAK+YYkSxSobwo96CPtaFVYnmvGQWp3uc6z3nPqR5LqE0jq/z0Rfes5qaEHd5gwWd2c+GRNBYcBmYruIiNL26wXRv7We3mY7+cqvp7Ge3mA7lnpvPF8tOIVLAC1Ejc0k9sLooxu8mSxuWhkYm7e3gBpMObV9/UkfXjSrDUQN/9EYH4LxRPKeGkkoWDdYDbDSYqbM4DjQKhbjH89z30yTeu6927bDI9L4Kgu9f8V8DU7xwHgzoHysFfBfi4r1tCMwdpStp5Fp/fZUqpwmzNbxiv7XifJ/ZdjlqzILRoNsZ4koGj3Akk8nZLyz38i3+49MNkB6wAifR974EVPiw/wQCplSHUmNJemSSV/ajjg4B5a2Kgrv6YVajgEQEl6AG84Scg7Gui52sr9ukmgBomCflCArpoyWpnRdIMEtGlQGqcQEpOx1CGyEU9/ZNkeZmdV+nu7FNjFiNz0+KeOlN0lEg7jaJjjuuwsi9+fg9+FjDbGAV/kXOyOlLy1TARzhbt7s7sN6iUV1fxLAWmcCATYSj5q1WJBwaq7uXJLknOrLuYNx5b+6XbhaoXclzJTXLpSh0f229CJTbFOB1IQyNKn1kCKSF+hKVU5PX0wmZmhKaIFpxcYoBbfB3PR3AeUedvwVU8X3ijZUzZjfC7QkrMp94UnHoFwuOgjYZORkufGSp5fTsugSaG6fKXAXlaQxVCom/b/vO9Cmvp95C7bm8K4YoS3v18zoEn/gmGa7AL4w6sEJtqEko0t7RXLGi4+i7XVyYKKywIZXdBM+BMBP5kpa88AnQIUkSKhxHbh3PwN0CIw4O/fVLKS+aekOm3Y5BbsK0o4k6aXNjDPvL0Pl9E2tLN40oAvVgWdcR2lHmeivcMA/zRIYhlrAvPmPECq/+BGS/F0v1ckMpm6Iluhf2n96tDvnVtKCGDgunb92vuKI8+VRbbLcFCGhRfIIXPvkhfeVlqWJZNSFn+CCrlbTyYVuYO5hf3C+7n2+gU+EnFh0/SLkoGa44aBwnVgrGGh5lEUuuQS5ihmYBMFjqqBiNAvPgy1cKzNEcvl5Cm7d89TShhkd4/8YzbWCG6cx1nS1mYDZXxuJTJPldPZn7YGOrUjSXY7a85Gb96Uq9J55w7KtNZ3WUtunG9ah803kwSWSjOZJXu+M7flDI/AKo1DGEl/7fA4cLf4P7tZvw736zR1svpTKfUFxuPSZU5Eup/Hy7gRmM3GwBLHKtH5Z0UtEoF+Dc7/HxGE0RqoY/GdyOkakquuhLFdfOZr/qeuxuMOugenPdpL
efrqQzVurW9vJaruxNV1GIPdDsX3uwJDo/uVrvJ9fc5RZXBEEIGrCTSRzdvsZ/DQxyapX2iFqdGGA/99V1sohA7fMh8iT/+w8/80UzY0owzBl28/8YPxuAov09XLLpjdkOSuLZrz5N7UfXnqgE6JudqloWw+R2o02MMFDLMSXaTtXcVZOOZjqTBfl4+nLYOqBrmt/fotoR+5PJonfU7ziZLNgICvGYXH8cN5vInfuK1v2ZIPYJa5jf13TRkMNzXsMAb4vPMOwIUq/j9nefF8f9fwEAAP//7/ZQgA==" } diff --git a/heartbeat/monitors/defaults/default.go b/heartbeat/include/list.go similarity index 76% rename from heartbeat/monitors/defaults/default.go rename to heartbeat/include/list.go index 35bb76fb951..b214121c23f 100644 --- a/heartbeat/monitors/defaults/default.go +++ b/heartbeat/include/list.go @@ -15,17 +15,13 @@ // specific language governing permissions and limitations // under the License. -// Code generated by 'make imports' - DO NOT EDIT. +// Code generated by beats/dev-tools/module_include_list/module_include_list.go - DO NOT EDIT. -/* -Package defaults imports all Monitor packages so that they -register with the global monitor registry. This package can be imported in the -main package to automatically register all of the standard supported Heartbeat -modules. -*/ -package defaults +package include import ( + // Import packages that need to register themselves. + _ "github.com/elastic/beats/heartbeat/monitors/active/dialchain" _ "github.com/elastic/beats/heartbeat/monitors/active/http" _ "github.com/elastic/beats/heartbeat/monitors/active/icmp" _ "github.com/elastic/beats/heartbeat/monitors/active/tcp" diff --git a/heartbeat/magefile.go b/heartbeat/magefile.go index 9dab7cec2c5..47dec62d7c3 100644 --- a/heartbeat/magefile.go +++ b/heartbeat/magefile.go @@ -20,121 +20,34 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + heartbeat "github.com/elastic/beats/heartbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Ping remote services for availability and log " + - "results to Elasticsearch or send to Logstash." - mage.BeatServiceName = "heartbeat-elastic" -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// CrossBuildXPack cross-builds the beat with XPack for all target platforms. -func CrossBuildXPack() error { - return mage.CrossBuildXPack() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. 
-func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() + heartbeat.SelectLogic = mage.OSSProject } -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatPackaging() - customizePackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildXPack, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages(mage.WithMonitorsD()) -} - -// Update updates the generated files (aka make update). -func Update() error { - return sh.Run("make", "update") -} - -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML("monitors/active") -} - -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} - -func customizePackaging() { - monitorsDTarget := "monitors.d" - unixMonitorsDir := "/etc/{{.BeatName}}/monitors.d" - monitorsD := mage.PackageFile{ - Mode: 0644, - Source: "monitors.d", - } - - for _, args := range mage.Packages { - pkgType := args.Types[0] - switch pkgType { - case mage.Docker: - args.Spec.ExtraVar("linux_capabilities", "cap_net_raw=eip") - args.Spec.Files[monitorsDTarget] = monitorsD - case mage.TarGz, mage.Zip: - args.Spec.Files[monitorsDTarget] = monitorsD - case mage.Deb, mage.RPM, mage.DMG: - args.Spec.Files[unixMonitorsDir] = monitorsD - } - } -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(heartbeat.Update.All) } diff --git a/heartbeat/monitors/active/dialchain/fields.go b/heartbeat/monitors/active/dialchain/fields.go new file mode 100644 index 00000000000..6b4a59a9664 --- /dev/null +++ b/heartbeat/monitors/active/dialchain/fields.go @@ -0,0 +1,35 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated by beats/dev-tools/cmd/asset/asset.go - DO NOT EDIT. 
+ +package dialchain + +import ( + "github.com/elastic/beats/libbeat/asset" +) + +func init() { + if err := asset.SetFields("heartbeat", "dialchain", asset.ModuleFieldsPri, Asset); err != nil { + panic(err) + } +} + +// Asset returns asset data +func Asset() string { + return "eJzMU7ty2zAQ7PkVO25SWZ0bFWmSVPFMCqXXnICjeSMIYA5HOfz7DGjJBvWInZlkYlQkHrt7d7u32PK4RE5um+8awMQCL3Gz+vbp6+oOvaaf400DeM5OpTdJcdkArXDwuXwBt4i04wqjLBt7XuJB09AfdmoEfDxsAjURlAMZ+xk8Tv5eCNXsee8S429Yy/p+v0KgkRWahuhhKj1MdpwX1b1T8lqASzGys9nZNSGviJkEyY6h/GMQZQ9L4Gy0CZI70JFLUsRe6Ni2cin6Pkk8VQFsKLNHiqA9SaBN4BrE0qz1i5Pnl8quSx/y2dGx8pDiw4XDWfGfB6VJhkTsxGnK7FL0uWmagyMt5MqOZVYcnY7T+6exvdGWT0CXB3NlIJUzZoZcvGJIx2rSiiPjdUy23lMQv95wm5TPnOrJ+JpRv5AG4WyTHUGGx05cB+vqEX7INWEGKWNiXLxRGLXG+ke67gvRX1P13/PbUfS5oy3/swS3Ekt8i9RnsiqYQZn8WAU0sj0m3Z4Bv/T33QT1VwAAAP///Ee+XQ==" +} diff --git a/heartbeat/monitors/active/http/fields.go b/heartbeat/monitors/active/http/fields.go new file mode 100644 index 00000000000..3866452500d --- /dev/null +++ b/heartbeat/monitors/active/http/fields.go @@ -0,0 +1,35 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated by beats/dev-tools/cmd/asset/asset.go - DO NOT EDIT. + +package http + +import ( + "github.com/elastic/beats/libbeat/asset" +) + +func init() { + if err := asset.SetFields("heartbeat", "http", asset.ModuleFieldsPri, Asset); err != nil { + panic(err) + } +} + +// Asset returns asset data +func Asset() string { + return "eJzsVU1v2zoQvPtXDHJ+0Q/w4V3eK9BTESC+G7S4sghTS3W5sqOiP76gvixbjoMWKZAC1cngx+54Znb4iAO1a5Sq9QpQp57WePi82TyhCuw0yMMKsBRzcbW6wOsVUDjyNqZfwCPYVDRVSJ+2Na2xl9CMK/P7+HdYBLo2Qt4o2aFoNmzOW8zbNOKntbGT0ovOFl9plr5nkqPLKVVBE8li145/M5sdrBqvbnsN4QxCzOmibA/jQO0piL3YuYMF2JSEeAcRNqWLcBEGHPjRsPHtt5EpaGk0bTaRisZflS6CwOz3QnuTesdsQaSoLoicS/YG+k46b1oSSGjYQsXVUFdRnDO55PCM4Gi8s0bpCvptKAs43682gf8b6f4sdqQnIkbhJCp2rRJCMZrta0NpkRzvcRKnSgzDdlFNKNaBIw1Hawk5xUGgAXmS6NzUpM3AMF7I2HZR0ByN82bnCUx6CnJAHpgpT7ez1eL4l6C0hivOzZLYHBSpfIK0C7ZFEAT2LQxqocK9/AMtXVxU6z9uqh0JbKC+UtF430KoForECi0JGtT4TkgwkaUlM4NMoQPS3UlIsmsH3lAe80G+BbLX3gfe39i8kH/i3TEql0uIlAe2cXXXatuE9N39FuYiJYc5ITsRZNhO/Tu2RmctCvY0/nUCfqMT0sjTdoiBX3LCXR9E4k6RxEUeqtqT0kXy3EiMZVL0CfJGYnxgkkeHb0syluR9aN4kI07TNWb8nPCoRvRahUT+jXDvn4E0m7NXopBQjWpdpH/S46zccnCvsv/PVS4PrMSaLZq+2vAnNOuCQsXRkewwIV23CRoGbNltcF0gvXt6z+ENr/xkHKgYjqbXDc/JXxEnp+WiXLrh2KkzHpv/nmZ6w6hSVWuGT2z72zCFkpzzfFHNOou8pPxw8WB85Lfho7j6RwAAAP//fI+IfA==" +} diff --git a/heartbeat/monitors/active/icmp/fields.go b/heartbeat/monitors/active/icmp/fields.go new file mode 100644 index 00000000000..da9290c97b1 --- /dev/null +++ b/heartbeat/monitors/active/icmp/fields.go @@ -0,0 +1,35 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated by beats/dev-tools/cmd/asset/asset.go - DO NOT EDIT. + +package icmp + +import ( + "github.com/elastic/beats/libbeat/asset" +) + +func init() { + if err := asset.SetFields("heartbeat", "icmp", asset.ModuleFieldsPri, Asset); err != nil { + panic(err) + } +} + +// Asset returns asset data +func Asset() string { + return "eJx0jzFuwzAMRXed4iN7cgAPXdoOGVoEuYErMS5Ri1IlavDtCwGyodgoJ+ITfI8844eWAWx9NICyzjTgdH39uJ0M4CjbxFE5yGCAB9Pscu2AM2T0tG3W0iXSgCmFsib9Pl5aCFxviCxT411a3tN7Q6LfQlnzNlhNLEoTpS7/x1frs/gvSuAH6nN4t9/h3sDIJO5ijmLVg7P/7mDcyGhojOJwpzgvSKGIgyaOUPbUIfZ/9yeU/BSvV8xBpt3g6ZC3ksbagQWebQqZbBCXzV8AAAD//5VWiFU=" +} diff --git a/heartbeat/monitors/active/tcp/fields.go b/heartbeat/monitors/active/tcp/fields.go new file mode 100644 index 00000000000..7763803b532 --- /dev/null +++ b/heartbeat/monitors/active/tcp/fields.go @@ -0,0 +1,35 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// Code generated by beats/dev-tools/cmd/asset/asset.go - DO NOT EDIT. + +package tcp + +import ( + "github.com/elastic/beats/libbeat/asset" +) + +func init() { + if err := asset.SetFields("heartbeat", "tcp", asset.ModuleFieldsPri, Asset); err != nil { + panic(err) + } +} + +// Asset returns asset data +func Asset() string { + return "eJzMkLFSwzAMhvc8xX/d2wfIwAILW+/gBZRYDbo6dpCVQt6ec0lCStrrwoCnRLL16fu3OPJQwuquAEzMc4nN6+MengbWTQE4TrVKZxJDWQAHYe9S/gK2CNTy9DofGzou0Wjsp8ryOR7GIpARge0j6vEbBWVPxm4E7MabS9wS2UW1uThxJRg3rIv6DXo+L6wnqfk8CaFvK9ZdseKorTFLvTuQOUlo7IODqXQwaXkWvCa5XKCOIXBtF71bi9xZJp+nXin3oPzei7KDRXAyqrykN9B545GZr1WU2CEGkFcmN6wG0onEU+UZz3uQc8rpwu2W39KxT6vWpOhjaK40LyxnJwlopdaYuI7BpeJqoCfy4sj4zxONh2l2/kvG3U96/CnJJDS/4v0vQX0FAAD//xp3EWM=" +} diff --git a/heartbeat/scripts/generate_imports_helper.py b/heartbeat/scripts/generate_imports_helper.py deleted file mode 100644 index 9a7e7ecd5cd..00000000000 --- a/heartbeat/scripts/generate_imports_helper.py +++ /dev/null @@ -1,39 +0,0 @@ -comment = """Package defaults imports all Monitor packages so that they -register with the global monitor registry. 
This package can be imported in the -main package to automatically register all of the standard supported Heartbeat -modules.""" - -from os.path import abspath, isdir, join -from os import listdir - - -blacklist = [ - "monitors/active/dialchain" -] - - -def get_importable_lines(go_beat_path, import_line): - def format(package, name): - return import_line.format( - beat_path=go_beat_path, - module=package, - name=name) - - def imports(mode): - package = "monitors/{}".format(mode) - return [format(package, m) for m in collect_monitors(package)] - - return sorted(imports("active") + imports("passive")) - - -def collect_monitors(package): - path = abspath(package) - if not isdir(path): - return [] - return [m for m in listdir(path) if is_monitor(package, m)] - - -def is_monitor(package, name): - return (name != "_meta" and - isdir(join(abspath(package), name)) and - "{}/{}".format(package, name) not in blacklist) diff --git a/heartbeat/scripts/mage/config.go b/heartbeat/scripts/mage/config.go new file mode 100644 index 00000000000..4a74d982fa3 --- /dev/null +++ b/heartbeat/scripts/mage/config.go @@ -0,0 +1,44 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/elastic/beats/dev-tools/mage" +) + +// config generates short/reference/docker configs. +func config() error { + return mage.Config(mage.AllConfigTypes, configFileParams(), ".") +} + +func configFileParams() mage.ConfigFileParams { + return mage.ConfigFileParams{ + ShortParts: []string{ + mage.OSSBeatDir("_meta/beat.yml"), + mage.LibbeatDir("_meta/config.yml"), + }, + ReferenceParts: []string{ + mage.OSSBeatDir("_meta/beat.reference.yml"), + mage.LibbeatDir("_meta/config.reference.yml"), + }, + DockerParts: []string{ + mage.OSSBeatDir("_meta/beat.docker.yml"), + mage.LibbeatDir("_meta/config.docker.yml"), + }, + } +} diff --git a/heartbeat/scripts/mage/fields.go b/heartbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..2140b10ab1c --- /dev/null +++ b/heartbeat/scripts/mage/fields.go @@ -0,0 +1,87 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. 
See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + "go.uber.org/multierr" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch SelectLogic { + case mage.OSSProject: + return multierr.Combine( + b.commonFieldsGo(), + b.moduleFieldsGo(), + ) + case mage.XPackProject: + // No X-Pack specific content. + return nil + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject, mage.XPackProject: + modules = append(modules, mage.OSSBeatDir("monitors/active")) + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML, + mage.OSSBeatDir("monitors/active"), + ) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} + +// monitorFieldsGo generates a fields.go for each monitor. +func (b fieldsBuilder) moduleFieldsGo() error { + return mage.GenerateModuleFieldsGo("monitors/active") +} diff --git a/heartbeat/scripts/mage/package.go b/heartbeat/scripts/mage/package.go new file mode 100644 index 00000000000..08f765105f0 --- /dev/null +++ b/heartbeat/scripts/mage/package.go @@ -0,0 +1,86 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "fmt" + "time" + + "github.com/magefile/mage/mg" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" +) + +func init() { + mage.BeatDescription = "Ping remote services for availability and log " + + "results to Elasticsearch or send to Logstash." + mage.BeatServiceName = "heartbeat-elastic" +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. 
+func Package() {
+	start := time.Now()
+	defer func() { fmt.Println("package ran for", time.Since(start)) }()
+
+	switch SelectLogic {
+	case mage.OSSProject:
+		mage.UseElasticBeatOSSPackaging()
+	case mage.XPackProject:
+		mage.UseElasticBeatXPackPackaging()
+	}
+	mage.PackageKibanaDashboardsFromBuildDir()
+	customizePackaging()
+
+	mg.SerialDeps(Update.All)
+	mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon)
+	mg.SerialDeps(mage.Package, pkg.PackageTest)
+}
+
+// customizePackaging modifies the package specs to include the monitors.d directory.
+func customizePackaging() {
+	monitorsDTarget := "monitors.d"
+	unixMonitorsDir := "/etc/{{.BeatName}}/monitors.d"
+	monitorsD := mage.PackageFile{
+		Mode:   0644,
+		Source: mage.OSSBeatDir("monitors.d"),
+	}
+
+	for _, args := range mage.Packages {
+		for _, pkgType := range args.Types {
+			switch pkgType {
+			case mage.Docker:
+				args.Spec.ExtraVar("linux_capabilities", "cap_net_raw=eip")
+				args.Spec.Files[monitorsDTarget] = monitorsD
+			case mage.TarGz, mage.Zip:
+				args.Spec.Files[monitorsDTarget] = monitorsD
+			case mage.Deb, mage.RPM, mage.DMG:
+				args.Spec.Files[unixMonitorsDir] = monitorsD
+			default:
+				panic(errors.Errorf("unknown package type: %v", pkgType))
+			}
+
+			break
+		}
+	}
+}
diff --git a/heartbeat/scripts/mage/update.go b/heartbeat/scripts/mage/update.go
new file mode 100644
index 00000000000..c885d3224b4
--- /dev/null
+++ b/heartbeat/scripts/mage/update.go
@@ -0,0 +1,77 @@
+// Licensed to Elasticsearch B.V. under one or more contributor
+// license agreements. See the NOTICE file distributed with
+// this work for additional information regarding copyright
+// ownership. Elasticsearch B.V. licenses this file to you under
+// the Apache License, Version 2.0 (the "License"); you may
+// not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied.  See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+package mage
+
+import (
+	"github.com/magefile/mage/mg"
+
+	"github.com/elastic/beats/dev-tools/mage"
+	"github.com/elastic/beats/dev-tools/mage/target/build"
+	"github.com/elastic/beats/dev-tools/mage/target/common"
+	"github.com/elastic/beats/dev-tools/mage/target/dashboards"
+	"github.com/elastic/beats/dev-tools/mage/target/integtest"
+	"github.com/elastic/beats/dev-tools/mage/target/unittest"
+)
+
+func init() {
+	common.RegisterCheckDeps(Update.All)
+
+	dashboards.RegisterImportDeps(build.Build, Update.Dashboards)
+
+	unittest.RegisterGoTestDeps(Update.Fields)
+	unittest.RegisterPythonTestDeps(Update.Fields)
+
+	integtest.RegisterPythonTestDeps(Update.Fields)
+}
+
+var (
+	// SelectLogic configures the types of project logic to use (OSS vs X-Pack).
+	SelectLogic mage.ProjectType
+)
+
+// Update target namespace.
+type Update mg.Namespace
+
+// All updates all generated content.
+func (Update) All() {
+	mg.Deps(Update.Fields, Update.Dashboards, Update.Config, Update.Includes)
+}
+
+// Config updates the Beat's config files.
+func (Update) Config() error {
+	return config()
+}
+
+// Dashboards collects all the dashboards and generates index patterns.
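For orientation, the shared logic above is consumed from a per-project magefile. The OSS heartbeat magefile wiring appears near the top of this patch hunk (setting SelectLogic and aliasing Update); the x-pack variant is not shown in this section, but assuming it mirrors that wiring with the X-Pack project type selected, a minimal consumer magefile would look roughly like the sketch below (import alias and target list are illustrative, not the patch's authoritative file).

// +build mage

package main

import (
	"github.com/magefile/mage/mg"

	"github.com/elastic/beats/dev-tools/mage"
	heartbeat "github.com/elastic/beats/heartbeat/scripts/mage"
)

func init() {
	// Assumption: the x-pack project selects the X-Pack flavor of the shared logic.
	heartbeat.SelectLogic = mage.XPackProject
}

// Update is an alias for update:all (workaround for magefile/mage#217).
func Update() { mg.Deps(heartbeat.Update.All) }

// Package packages the Beat for distribution using the shared packaging logic.
func Package() { heartbeat.Package() }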
+func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + return mage.KibanaDashboards(mage.OSSBeatDir("monitors/active")) +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// Includes updates include/list.go. +func (Update) Includes() error { + if SelectLogic != mage.OSSProject { + return nil + } + return mage.GenerateIncludeListGo([]string{"monitors/active/*"}, nil) +} diff --git a/x-pack/heartbeat/Makefile b/x-pack/heartbeat/Makefile new file mode 100644 index 00000000000..7427a5c672b --- /dev/null +++ b/x-pack/heartbeat/Makefile @@ -0,0 +1,4 @@ +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/heartbeat/heartbeat.docker.yml b/x-pack/heartbeat/heartbeat.docker.yml new file mode 100644 index 00000000000..2302283f674 --- /dev/null +++ b/x-pack/heartbeat/heartbeat.docker.yml @@ -0,0 +1,31 @@ +# Define a directory to load monitor definitions from. Definitions take the form +# of individual yaml files. +heartbeat.config.monitors: + # Directory + glob pattern to search for configuration files + path: ${path.config}/monitors.d/*.yml + # If enabled, heartbeat will periodically check the config.monitors path for changes + reload.enabled: false + # How often to check for changes + reload.period: 5s + + +heartbeat.monitors: +- type: http + schedule: '@every 5s' + urls: + - http://elasticsearch:9200 + - http://kibana:5601 + +- type: icmp + schedule: '@every 5s' + hosts: + - elasticsearch + - kibana + +processors: +- add_cloud_metadata: ~ + +output.elasticsearch: + hosts: '${ELASTICSEARCH_HOSTS:elasticsearch:9200}' + username: '${ELASTICSEARCH_USERNAME:}' + password: '${ELASTICSEARCH_PASSWORD:}' diff --git a/x-pack/heartbeat/heartbeat.reference.yml b/x-pack/heartbeat/heartbeat.reference.yml new file mode 100644 index 00000000000..99ccee877a8 --- /dev/null +++ b/x-pack/heartbeat/heartbeat.reference.yml @@ -0,0 +1,1397 @@ +################### Heartbeat Configuration Example ######################### + +# This file is a full configuration example documenting all non-deprecated +# options in comments. For a shorter configuration example, that contains +# only some common options, please see heartbeat.yml in the same directory. +# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/heartbeat/index.html + +############################# Heartbeat ###################################### + + +# Define a directory to load monitor definitions from. Definitions take the form +# of individual yaml files. +heartbeat.config.monitors: + # Directory + glob pattern to search for configuration files + path: ${path.config}/monitors.d/*.yml + # If enabled, heartbeat will periodically check the config.monitors path for changes + reload.enabled: false + # How often to check for changes + reload.period: 5s + +# Configure monitors +heartbeat.monitors: +- type: icmp # monitor type `icmp` (requires root) uses ICMP Echo Request to ping + # configured hosts + + # Monitor name used for job name and document type. + #name: icmp + + # Enable/Disable monitor + #enabled: true + + # Configure task schedule using cron-like syntax + schedule: '*/5 * * * * * *' # exactly every 5 seconds like 10:00:00, 10:00:05, ... + + # List of hosts to ping + hosts: ["localhost"] + + # Configure IP protocol types to ping on if hostnames are configured. + # Ping all resolvable IPs if `mode` is `all`, or only one IP if `mode` is `any`. + ipv4: true + ipv6: true + mode: any + + # Total running time per ping test. 
+ timeout: 16s + + # Waiting duration until another ICMP Echo Request is emitted. + wait: 1s + + # The tags of the monitors are included in their own field with each + # transaction published. Tags make it easy to group servers by different + # logical properties. + #tags: ["service-X", "web-tier"] + + # Optional fields that you can specify to add additional information to the + # monitor output. Fields can be scalar values, arrays, dictionaries, or any nested + # combination of these. + #fields: + # env: staging + + # If this option is set to true, the custom fields are stored as top-level + # fields in the output document instead of being grouped under a fields + # sub-dictionary. Default is false. + #fields_under_root: false + + # NOTE: THIS FEATURE IS DEPRECATED AND WILL BE REMOVED IN A FUTURE RELEASE + # Configure file json file to be watched for changes to the monitor: + #watch.poll_file: + # Path to check for updates. + #path: + + # Interval between file file changed checks. + #interval: 5s + +# Define a directory to load monitor definitions from. Definitions take the form +# of individual yaml files. +# heartbeat.config.monitors: + # Directory + glob pattern to search for configuration files + #path: /path/to/my/monitors.d/*.yml + # If enabled, heartbeat will periodically check the config.monitors path for changes + #reload.enabled: true + # How often to check for changes + #reload.period: 1s + +- type: tcp # monitor type `tcp`. Connect via TCP and optionally verify endpoint + # by sending/receiving a custom payload + + # Monitor name used for job name and document type + #name: tcp + + # Enable/Disable monitor + #enabled: true + + # Configure task schedule + schedule: '@every 5s' # every 5 seconds from start of beat + + # configure hosts to ping. + # Entries can be: + # - plain host name or IP like `localhost`: + # Requires ports configs to be checked. If ssl is configured, + # a SSL/TLS based connection will be established. Otherwise plain tcp connection + # will be established + # - hostname + port like `localhost:12345`: + # Connect to port on given host. If ssl is configured, + # a SSL/TLS based connection will be established. Otherwise plain tcp connection + # will be established + # - full url syntax. `scheme://:[port]`. The `` can be one of + # `tcp`, `plain`, `ssl` and `tls`. If `tcp`, `plain` is configured, a plain + # tcp connection will be established, even if ssl is configured. + # Using `tls`/`ssl`, an SSL connection is established. If no ssl is configured, + # system defaults will be used (not supported on windows). + # If `port` is missing in url, the ports setting is required. + hosts: ["localhost:9200"] + + # Configure IP protocol types to ping on if hostnames are configured. + # Ping all resolvable IPs if `mode` is `all`, or only one IP if `mode` is `any`. + ipv4: true + ipv6: true + mode: any + + # List of ports to ping if host does not contain a port number + # ports: [80, 9200, 5044] + + # Total test connection and data exchange timeout + #timeout: 16s + + # Optional payload string to send to remote and expected answer. If none is + # configured, the endpoint is expected to be up if connection attempt was + # successful. If only `send_string` is configured, any response will be + # accepted as ok. If only `receive_string` is configured, no payload will be + # send, but client expects to receive expected payload on connect. 
+ #check: + #send: '' + #receive: '' + + # SOCKS5 proxy url + # proxy_url: '' + + # Resolve hostnames locally instead on SOCKS5 server: + #proxy_use_local_resolver: false + + # TLS/SSL connection settings: + #ssl: + # Certificate Authorities + #certificate_authorities: [''] + + # Required TLS protocols + #supported_protocols: ["TLSv1.0", "TLSv1.1", "TLSv1.2"] + + # NOTE: THIS FEATURE IS DEPRECATED AND WILL BE REMOVED IN A FUTURE RELEASE + # Configure file json file to be watched for changes to the monitor: + #watch.poll_file: + # Path to check for updates. + #path: + + # Interval between file file changed checks. + #interval: 5s + + +- type: http # monitor type `http`. Connect via HTTP an optionally verify response + + # Monitor name used for job name and document type + #name: http + + # Enable/Disable monitor + #enabled: true + + # Configure task schedule + schedule: '@every 5s' # every 5 seconds from start of beat + + # Configure URLs to ping + urls: ["http://localhost:9200"] + + # Configure IP protocol types to ping on if hostnames are configured. + # Ping all resolvable IPs if `mode` is `all`, or only one IP if `mode` is `any`. + ipv4: true + ipv6: true + mode: any + + # Configure file json file to be watched for changes to the monitor: + #watch.poll_file: + # Path to check for updates. + #path: + + # Interval between file file changed checks. + #interval: 5s + + # Optional HTTP proxy url. + #proxy_url: '' + + # Total test connection and data exchange timeout + #timeout: 16s + + # Optional Authentication Credentials + #username: '' + #password: '' + + # TLS/SSL connection settings for use with HTTPS endpoint. If not configured + # system defaults will be used. + #ssl: + # Certificate Authorities + #certificate_authorities: [''] + + # Required TLS protocols + #supported_protocols: ["TLSv1.0", "TLSv1.1", "TLSv1.2"] + + # Request settings: + #check.request: + # Configure HTTP method to use. Only 'HEAD', 'GET' and 'POST' methods are allowed. + #method: "GET" + + # Dictionary of additional HTTP headers to send: + #headers: + + # Optional request body content + #body: + + # Expected response settings + #check.response: + # Expected status code. If not configured or set to 0 any status code not + # being 404 is accepted. + #status: 0 + + # Required response headers. + #headers: + + # Required response contents. + #body: + + # Parses the body as JSON, then checks against the given condition expression + #json: + #- description: Explanation of what the check does + # condition: + # equals: + # myField: expectedValue + + + # NOTE: THIS FEATURE IS DEPRECATED AND WILL BE REMOVED IN A FUTURE RELEASE + # Configure file json file to be watched for changes to the monitor: + #watch.poll_file: + # Path to check for updates. + #path: + + # Interval between file file changed checks. + #interval: 5s + + +heartbeat.scheduler: + # Limit number of concurrent tasks executed by heartbeat. The task limit if + # disabled if set to 0. The default is 0. + #limit: 0 + + # Set the scheduler it's timezone + #location: '' + +#================================ General ====================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +# If this options is not defined, the hostname is used. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. Tags make it easy to group servers by different +# logical properties. 
+#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. Fields can be scalar values, arrays, dictionaries, or any nested +# combination of these. +#fields: +# env: staging + +# If this option is set to true, the custom fields are stored as top-level +# fields in the output document instead of being grouped under a fields +# sub-dictionary. Default is false. +#fields_under_root: false + +# Internal queue configuration for buffering events to be published. +#queue: + # Queue type by name (default 'mem') + # The memory queue will present all available events (up to the outputs + # bulk_max_size) to the output, the moment the output is ready to server + # another batch of events. + #mem: + # Max number of events the queue can buffer. + #events: 4096 + + # Hints the minimum number of events stored in the queue, + # before providing a batch of events to the outputs. + # The default value is set to 2048. + # A value of 0 ensures events are immediately available + # to be sent to the outputs. + #flush.min_events: 2048 + + # Maximum duration after which events are available to the outputs, + # if the number of events stored in the queue is < min_flush_events. + #flush.timeout: 1s + + # The spool queue will store events in a local spool file, before + # forwarding the events to the outputs. + # + # Beta: spooling to disk is currently a beta feature. Use with care. + # + # The spool file is a circular buffer, which blocks once the file/buffer is full. + # Events are put into a write buffer and flushed once the write buffer + # is full or the flush_timeout is triggered. + # Once ACKed by the output, events are removed immediately from the queue, + # making space for new events to be persisted. + #spool: + # The file namespace configures the file path and the file creation settings. + # Once the file exists, the `size`, `page_size` and `prealloc` settings + # will have no more effect. + #file: + # Location of spool file. The default value is ${path.data}/spool.dat. + #path: "${path.data}/spool.dat" + + # Configure file permissions if file is created. The default value is 0600. + #permissions: 0600 + + # File size hint. The spool blocks, once this limit is reached. The default value is 100 MiB. + #size: 100MiB + + # The files page size. A file is split into multiple pages of the same size. The default value is 4KiB. + #page_size: 4KiB + + # If prealloc is set, the required space for the file is reserved using + # truncate. The default value is true. + #prealloc: true + + # Spool writer settings + # Events are serialized into a write buffer. The write buffer is flushed if: + # - The buffer limit has been reached. + # - The configured limit of buffered events is reached. + # - The flush timeout is triggered. + #write: + # Sets the write buffer size. + #buffer_size: 1MiB + + # Maximum duration after which events are flushed, if the write buffer + # is not full yet. The default value is 1s. + #flush.timeout: 1s + + # Number of maximum buffered events. The write buffer is flushed once the + # limit is reached. + #flush.events: 16384 + + # Configure the on-disk event encoding. The encoding can be changed + # between restarts. + # Valid encodings are: json, ubjson, and cbor. + #codec: cbor + #read: + # Reader flush timeout, waiting for more events to become available, so + # to fill a complete batch, as required by the outputs. + # If flush_timeout is 0, all available events are forwarded to the + # outputs immediately. 
+ # The default value is 0s. + #flush.timeout: 0s + +# Sets the maximum number of CPUs that can be executing simultaneously. The +# default is the number of logical CPUs available in the system. +#max_procs: + +#================================ Processors =================================== + +# Processors are used to reduce the number of fields in the exported event or to +# enhance the event with external metadata. This section defines a list of +# processors that are applied one by one and the first one receives the initial +# event: +# +# event -> filter1 -> event1 -> filter2 ->event2 ... +# +# The supported processors are drop_fields, drop_event, include_fields, +# decode_json_fields, and add_cloud_metadata. +# +# For example, you can use the following processors to keep the fields that +# contain CPU load percentages, but remove the fields that contain CPU ticks +# values: +# +#processors: +#- include_fields: +# fields: ["cpu"] +#- drop_fields: +# fields: ["cpu.user", "cpu.system"] +# +# The following example drops the events that have the HTTP response code 200: +# +#processors: +#- drop_event: +# when: +# equals: +# http.code: 200 +# +# The following example renames the field a to b: +# +#processors: +#- rename: +# fields: +# - from: "a" +# to: "b" +# +# The following example tokenizes the string into fields: +# +#processors: +#- dissect: +# tokenizer: "%{key1} - %{key2}" +# field: "message" +# target_prefix: "dissect" +# +# The following example enriches each event with metadata from the cloud +# provider about the host machine. It works on EC2, GCE, DigitalOcean, +# Tencent Cloud, and Alibaba Cloud. +# +#processors: +#- add_cloud_metadata: ~ +# +# The following example enriches each event with the machine's local time zone +# offset from UTC. +# +#processors: +#- add_locale: +# format: offset +# +# The following example enriches each event with docker metadata, it matches +# given fields to an existing container id and adds info from that container: +# +#processors: +#- add_docker_metadata: +# host: "unix:///var/run/docker.sock" +# match_fields: ["system.process.cgroup.id"] +# match_pids: ["process.pid", "process.ppid"] +# match_source: true +# match_source_index: 4 +# match_short_id: false +# cleanup_timeout: 60 +# labels.dedot: false +# # To connect to Docker over TLS you must specify a client and CA certificate. +# #ssl: +# # certificate_authority: "/etc/pki/root/ca.pem" +# # certificate: "/etc/pki/client/cert.pem" +# # key: "/etc/pki/client/cert.key" +# +# The following example enriches each event with docker metadata, it matches +# container id from log path available in `source` field (by default it expects +# it to be /var/lib/docker/containers/*/*.log). +# +#processors: +#- add_docker_metadata: ~ +# +# The following example enriches each event with host metadata. +# +#processors: +#- add_host_metadata: +# netinfo.enabled: false +# +# The following example enriches each event with process metadata using +# process IDs included in the event. +# +#processors: +#- add_process_metadata: +# match_pids: ["system.process.ppid"] +# target: system.process.parent +# +# The following example decodes fields containing JSON strings +# and replaces the strings with valid JSON objects. +# +#processors: +#- decode_json_fields: +# fields: ["field1", "field2", ...] 
+# process_array: false +# max_depth: 1 +# target: "" +# overwrite_keys: false + +#============================= Elastic Cloud ================================== + +# These settings simplify using heartbeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. +#cloud.auth: + +#================================ Outputs ====================================== + +# Configure what output to use when sending the data collected by the beat. + +#-------------------------- Elasticsearch output ------------------------------- +output.elasticsearch: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Array of hosts to connect to. + # Scheme and port can be left out and will be set to the default (http and 9200) + # In case you specify and additional path, the scheme is required: http://localhost:9200/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200 + hosts: ["localhost:9200"] + + # Enabled ilm (beta) to use index lifecycle management instead daily indices. + #ilm.enabled: false + #ilm.rollover_alias: "heartbeat" + #ilm.pattern: "{now/d}-000001" + + # Set gzip compression level. + #compression_level: 0 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Dictionary of HTTP parameters to pass within the url with index operations. + #parameters: + #param1: value1 + #param2: value2 + + # Number of workers per Elasticsearch host. + #worker: 1 + + # Optional index name. The default is "heartbeat" plus date + # and generates [heartbeat-]YYYY.MM.DD keys. + # In case you modify this pattern you must update setup.template.name and setup.template.pattern accordingly. + #index: "heartbeat-%{[beat.version]}-%{+yyyy.MM.dd}" + + # Optional ingest node pipeline. By default no pipeline will be used. + #pipeline: "" + + # Optional HTTP Path + #path: "/elasticsearch" + + # Custom HTTP headers to add to each request + #headers: + # X-My-Header: Contents of the header + + # Proxy server url + #proxy_url: http://proxy:3128 + + # The number of times a particular Elasticsearch index operation is attempted. If + # the indexing operation doesn't succeed after this many retries, the events are + # dropped. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Elasticsearch bulk API index request. + # The default is 50. + #bulk_max_size: 50 + + # The number of seconds to wait before trying to reconnect to Elasticsearch + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Elasticsearch after a network error. The default is 60s. + #backoff.max: 60s + + # Configure http request timeout before failing a request to Elasticsearch. + #timeout: 90 + + # Use SSL settings for HTTPS. + #ssl.enabled: true + + # Configure SSL verification mode. 
If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + +#----------------------------- Logstash output --------------------------------- +#output.logstash: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Number of workers per Logstash host. + #worker: 1 + + # Set gzip compression level. + #compression_level: 3 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional maximum time to live for a connection to Logstash, after which the + # connection will be re-established. A value of `0s` (the default) will + # disable this feature. + # + # Not yet supported for async connections (i.e. with the "pipelining" option set) + #ttl: 30s + + # Optional load balance the events between the Logstash hosts. Default is false. + #loadbalance: false + + # Number of batches to be sent asynchronously to Logstash while processing + # new batches. + #pipelining: 2 + + # If enabled only a subset of events in a batch of events is transferred per + # transaction. The number of events to be sent increases up to `bulk_max_size` + # if no error is encountered. + #slow_start: false + + # The number of seconds to wait before trying to reconnect to Logstash + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Logstash after a network error. The default is 60s. + #backoff.max: 60s + + # Optional index name. The default index name is set to heartbeat + # in all lowercase. + #index: 'heartbeat' + + # SOCKS5 proxy server URL + #proxy_url: socks5://user:password@socks5-server:2233 + + # Resolve names locally when using a proxy server. Defaults to false. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. 
By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat and Winlogbeat, ignore the max_retries setting + # and retry until all events are published. Set max_retries to a value less + # than 0 to retry until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Logstash request. The + # default is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Logstash server before + # timing out. The default is 30s. + #timeout: 30s + +#------------------------------- Kafka output ---------------------------------- +#output.kafka: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The list of Kafka broker addresses from where to fetch the cluster metadata. + # The cluster metadata contain the actual Kafka brokers events are published + # to. + #hosts: ["localhost:9092"] + + # The Kafka topic used for produced events. The setting can be a format string + # using any event field. To set the topic from document type use `%{[type]}`. + #topic: beats + + # The Kafka event key setting. Use format string to create unique event key. + # By default no event key will be generated. + #key: '' + + # The Kafka event partitioning strategy. Default hashing strategy is `hash` + # using the `output.kafka.key` setting or randomly distributes events if + # `output.kafka.key` is not configured. + #partition.hash: + # If enabled, events will only be published to partitions with reachable + # leaders. Default is false. + #reachable_only: false + + # Configure alternative event field names used to compute the hash value. + # If empty `output.kafka.key` setting will be used. + # Default value is empty list. + #hash: [] + + # Authentication details. Password is required if username is set. + #username: '' + #password: '' + + # Kafka version heartbeat is assumed to run against. Defaults to the "1.0.0". + #version: '1.0.0' + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Metadata update configuration. Metadata do contain leader information + # deciding which broker to use when publishing. + #metadata: + # Max metadata request retry attempts when cluster is in middle of leader + # election. Defaults to 3 retries. + #retry.max: 3 + + # Waiting time between retries during leader elections. Default is 250ms. + #retry.backoff: 250ms + + # Refresh metadata interval. Defaults to every 10 minutes. 
+ #refresh_frequency: 10m + + # The number of concurrent load-balanced Kafka output workers. + #worker: 1 + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Kafka request. The default + # is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Kafka brokers before + # timing out. The default is 30s. + #timeout: 30s + + # The maximum duration a broker will wait for number of required ACKs. The + # default is 10s. + #broker_timeout: 10s + + # The number of messages buffered for each Kafka broker. The default is 256. + #channel_buffer_size: 256 + + # The keep-alive period for an active network connection. If 0s, keep-alives + # are disabled. The default is 0 seconds. + #keep_alive: 0 + + # Sets the output compression codec. Must be one of none, snappy and gzip. The + # default is gzip. + #compression: gzip + + # Set the compression level. Currently only gzip provides a compression level + # between 0 and 9. The default value is chosen by the compression algorithm. + #compression_level: 4 + + # The maximum permitted size of JSON-encoded messages. Bigger messages will be + # dropped. The default value is 1000000 (bytes). This value should be equal to + # or less than the broker's message.max.bytes. + #max_message_bytes: 1000000 + + # The ACK reliability level required from broker. 0=no response, 1=wait for + # local commit, -1=wait for all replicas to commit. The default is 1. Note: + # If set to 0, no ACKs are returned by Kafka. Messages might be lost silently + # on error. + #required_acks: 1 + + # The configurable ClientID used for logging, debugging, and auditing + # purposes. The default is "beats". + #client_id: beats + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- Redis output ---------------------------------- +#output.redis: + # Boolean flag to enable or disable the output module. 
+ #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # The list of Redis servers to connect to. If load balancing is enabled, the + # events are distributed to the servers in the list. If one server becomes + # unreachable, the events are distributed to the reachable servers only. + #hosts: ["localhost:6379"] + + # The Redis port to use if hosts does not contain a port number. The default + # is 6379. + #port: 6379 + + # The name of the Redis list or channel the events are published to. The + # default is heartbeat. + #key: heartbeat + + # The password to authenticate with. The default is no authentication. + #password: + + # The Redis database number where the events are published. The default is 0. + #db: 0 + + # The Redis data type to use for publishing events. If the data type is list, + # the Redis RPUSH command is used. If the data type is channel, the Redis + # PUBLISH command is used. The default value is list. + #datatype: list + + # The number of workers to use for each host configured to publish events to + # Redis. Use this setting along with the loadbalance option. For example, if + # you have 2 hosts and 3 workers, in total 6 workers are started (3 for each + # host). + #worker: 1 + + # If set to true and multiple hosts or workers are configured, the output + # plugin load balances published events onto all Redis hosts. If set to false, + # the output plugin sends all events to only one host (determined at random) + # and will switch to another host if the currently selected one becomes + # unreachable. The default value is true. + #loadbalance: true + + # The Redis connection timeout in seconds. The default is 5 seconds. + #timeout: 5s + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The number of seconds to wait before trying to reconnect to Redis + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Redis after a network error. The default is 60s. + #backoff.max: 60s + + # The maximum number of events to bulk in a single Redis request or pipeline. + # The default is 2048. + #bulk_max_size: 2048 + + # The URL of the SOCKS5 proxy to use when connecting to the Redis servers. The + # value must be a URL with a scheme of socks5://. + #proxy_url: + + # This option determines whether Redis hostnames are resolved locally when + # using a proxy. The default value is false, which means that name resolution + # occurs on the proxy server. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. 
Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- File output ----------------------------------- +#output.file: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Path to the directory where to save the generated files. The option is + # mandatory. + #path: "/tmp/heartbeat" + + # Name of the generated files. The default is `heartbeat` and it generates + # files: `heartbeat`, `heartbeat.1`, `heartbeat.2`, etc. + #filename: heartbeat + + # Maximum size in kilobytes of each file. When this size is reached, and on + # every heartbeat restart, the files are rotated. The default value is 10240 + # kB. + #rotate_every_kb: 10000 + + # Maximum number of files under path. When this number of files is reached, + # the oldest file is deleted and the rest are shifted from last to first. The + # default is 7 files. + #number_of_files: 7 + + # Permissions to use for file creation. The default is 0600. + #permissions: 0600 + + +#----------------------------- Console output --------------------------------- +#output.console: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + +#================================= Paths ====================================== + +# The home path for the heartbeat installation. This is the default base path +# for all other path settings and for miscellaneous files that come with the +# distribution (for example, the sample dashboards). +# If not set by a CLI flag or in the configuration file, the default for the +# home path is the location of the binary. +#path.home: + +# The configuration path for the heartbeat installation. This is the default +# base path for configuration files, including the main YAML configuration file +# and the Elasticsearch template file. If not set by a CLI flag or in the +# configuration file, the default for the configuration path is the home path. +#path.config: ${path.home} + +# The data path for the heartbeat installation. This is the default base path +# for all the files in which heartbeat needs to store its data. If not set by a +# CLI flag or in the configuration file, the default for the data path is a data +# subdirectory inside the home path. +#path.data: ${path.home}/data + +# The logs path for a heartbeat installation. 
This is the default location for +# the Beat's log files. If not set by a CLI flag or in the configuration file, +# the default for the logs path is a logs subdirectory inside the home path. +#path.logs: ${path.home}/logs + +#================================ Keystore ========================================== +# Location of the Keystore containing the keys and their sensitive values. +#keystore.path: "${path.config}/beats.keystore" + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards are disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The directory from where to read the dashboards. The default is the `kibana` +# folder in the home path. +#setup.dashboards.directory: ${path.home}/kibana + +# The URL from where to download the dashboards archive. It is used instead of +# the directory if it has a value. +#setup.dashboards.url: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the directory when it has a value. +#setup.dashboards.file: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#setup.dashboards.beat: heartbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#setup.dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#setup.dashboards.index: + +# Always use the Kibana API for loading the dashboards instead of autodetecting +# how to install the dashboards by first querying Elasticsearch. +#setup.dashboards.always_kibana: false + +# If true and Kibana is not reachable at the time when dashboards are loaded, +# it will retry to reconnect to Kibana instead of exiting with an error. +#setup.dashboards.retry.enabled: false + +# Duration interval between Kibana connection retries. +#setup.dashboards.retry.interval: 1s + +# Maximum number of retries before exiting with an error, 0 for unlimited retrying. +#setup.dashboards.retry.maximum: 0 + + +#============================== Template ===================================== + +# A template is used to set the mapping in Elasticsearch +# By default template loading is enabled and the template is loaded. +# These settings can be adjusted to load your own template or overwrite existing ones. + +# Set to false to disable template loading. +#setup.template.enabled: true + +# Template name. By default the template name is "heartbeat-%{[beat.version]}" +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.name: "heartbeat-%{[beat.version]}" + +# Template pattern. By default the template pattern is "-%{[beat.version]}-*" to apply to the default index settings. +# The first part is the version of the beat and then -* is used to match all daily indices. +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.pattern: "heartbeat-%{[beat.version]}-*" + +# Path to fields.yml file to generate the template +#setup.template.fields: "${path.config}/fields.yml" + +# A list of fields to be added to the template and Kibana index pattern. 
Also +# specify setup.template.overwrite: true to overwrite the existing template. +# This setting is experimental. +#setup.template.append_fields: +#- name: field_name +# type: field_type + +# Enable json template loading. If this is enabled, the fields.yml is ignored. +#setup.template.json.enabled: false + +# Path to the json template file +#setup.template.json.path: "${path.config}/template.json" + +# Name under which the template is stored in Elasticsearch +#setup.template.json.name: "" + +# Overwrite existing template +#setup.template.overwrite: false + +# Elasticsearch template settings +setup.template.settings: + + # A dictionary of settings to place into the settings.index dictionary + # of the Elasticsearch template. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html + #index: + #number_of_shards: 1 + #codec: best_compression + #number_of_routing_shards: 30 + + # A dictionary of settings for the _source field. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html + #_source: + #enabled: false + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Optional HTTP Path + #path: "" + + # Use SSL settings for HTTPS. Default is true. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + + +#================================ Logging ====================================== +# There are four options for the log output: file, stderr, syslog, eventlog +# The file output is the default. + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: info + +# Enable debug output for selected components. To enable all selectors use ["*"] +# Other available selectors are "beat", "publish", "service" +# Multiple selectors can be chained. +#logging.selectors: [ ] + +# Send all logging output to syslog. The default is false. 
+#logging.to_syslog: false + +# Send all logging output to Windows Event Logs. The default is false. +#logging.to_eventlog: false + +# If enabled, heartbeat periodically logs its internal metrics that have changed +# in the last period. For each metric that changed, the delta from the value at +# the beginning of the period is logged. Also, the total values for +# all non-zero internal metrics are logged on shutdown. The default is true. +#logging.metrics.enabled: true + +# The period after which to log the internal metrics. The default is 30s. +#logging.metrics.period: 30s + +# Logging to rotating files. Set logging.to_files to false to disable logging to +# files. +logging.to_files: true +logging.files: + # Configure the path where the logs are written. The default is the logs directory + # under the home path (the binary location). + #path: /var/log/heartbeat + + # The name of the files where the logs are written to. + #name: heartbeat + + # Configure log file size limit. If limit is reached, log file will be + # automatically rotated + #rotateeverybytes: 10485760 # = 10MB + + # Number of rotated log files to keep. Oldest files will be deleted first. + #keepfiles: 7 + + # The permissions mask to apply when rotating log files. The default value is 0600. + # Must be a valid Unix-style file permissions mask expressed in octal notation. + #permissions: 0600 + + # Enable log file rotation on time intervals in addition to size-based rotation. + # Intervals must be at least 1s. Values of 1m, 1h, 24h, 7*24h, 30*24h, and 365*24h + # are boundary-aligned with minutes, hours, days, weeks, months, and years as + # reported by the local system clock. All other intervals are calculated from the + # unix epoch. Defaults to disabled. + #interval: 0 + +# Set to true to log messages in json format. +#logging.json: false + + +#============================== Xpack Monitoring ===================================== +# heartbeat can export internal metrics to a central Elasticsearch monitoring cluster. +# This requires xpack monitoring to be enabled in Elasticsearch. +# The reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. Any setting that is not set is +# automatically inherited from the Elasticsearch output configuration, so if you +# have the Elasticsearch output configured, you can simply uncomment the +# following line, and leave the rest commented out. +#xpack.monitoring.elasticsearch: + + # Array of hosts to connect to. + # Scheme and port can be left out and will be set to the default (http and 9200) + # In case you specify and additional path, the scheme is required: http://localhost:9200/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200 + #hosts: ["localhost:9200"] + + # Set gzip compression level. + #compression_level: 0 + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "beats_system" + #password: "changeme" + + # Dictionary of HTTP parameters to pass within the url with index operations. + #parameters: + #param1: value1 + #param2: value2 + + # Custom HTTP headers to add to each request + #headers: + # X-My-Header: Contents of the header + + # Proxy server url + #proxy_url: http://proxy:3128 + + # The number of times a particular Elasticsearch index operation is attempted. 
If + # the indexing operation doesn't succeed after this many retries, the events are + # dropped. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Elasticsearch bulk API index request. + # The default is 50. + #bulk_max_size: 50 + + # The number of seconds to wait before trying to reconnect to Elasticsearch + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Elasticsearch after a network error. The default is 60s. + #backoff.max: 60s + + # Configure http request timeout before failing an request to Elasticsearch. + #timeout: 90 + + # Use SSL settings for HTTPS. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + #metrics.period: 10s + #state.period: 1m + +#================================ HTTP Endpoint ====================================== +# Each beat can expose internal metrics through a HTTP endpoint. For security +# reasons the endpoint is disabled by default. This feature is currently experimental. +# Stats can be access through http://localhost:5066/stats . For pretty JSON output +# append ?pretty to the URL. + +# Defines if the HTTP endpoint is enabled. +#http.enabled: false + +# The HTTP endpoint will bind to this hostname or IP address. It is recommended to use only localhost. +#http.host: localhost + +# Port on which the HTTP endpoint will bind. Default is 5066. +#http.port: 5066 + +#============================= Process Security ================================ + +# Enable or disable seccomp system call filtering on Linux. Default is enabled. +#seccomp.enabled: true diff --git a/x-pack/heartbeat/heartbeat.yml b/x-pack/heartbeat/heartbeat.yml new file mode 100644 index 00000000000..eeb6a95475f --- /dev/null +++ b/x-pack/heartbeat/heartbeat.yml @@ -0,0 +1,164 @@ +################### Heartbeat Configuration Example ######################### + +# This file is an example configuration file highlighting only some common options. +# The heartbeat.reference.yml file in the same directory contains all the supported options +# with detailed comments. You can use it for reference. 
+# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/heartbeat/index.html + +############################# Heartbeat ###################################### + +# Define a directory to load monitor definitions from. Definitions take the form +# of individual yaml files. +heartbeat.config.monitors: + # Directory + glob pattern to search for configuration files + path: ${path.config}/monitors.d/*.yml + # If enabled, heartbeat will periodically check the config.monitors path for changes + reload.enabled: false + # How often to check for changes + reload.period: 5s + +# Configure monitors inline +heartbeat.monitors: +- type: http + + # List or urls to query + urls: ["http://localhost:9200"] + + # Configure task schedule + schedule: '@every 10s' + + # Total test connection and data exchange timeout + #timeout: 16s + +#==================== Elasticsearch template setting ========================== + +setup.template.settings: + index.number_of_shards: 1 + index.codec: best_compression + #_source.enabled: false + +#================================ General ===================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. +#fields: +# env: staging + + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#setup.dashboards.url: + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Kibana Space ID + # ID of the Kibana Space into which the dashboards should be loaded. By default, + # the Default Space will be used. + #space.id: + +#============================= Elastic Cloud ================================== + +# These settings simplify using heartbeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. 
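+# The value takes the form "username:password", for example "elastic:changeme".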
+#cloud.auth: + +#================================ Outputs ===================================== + +# Configure what output to use when sending the data collected by the beat. + +#-------------------------- Elasticsearch output ------------------------------ +output.elasticsearch: + # Array of hosts to connect to. + hosts: ["localhost:9200"] + + # Enabled ilm (beta) to use index lifecycle management instead daily indices. + #ilm.enabled: false + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + +#----------------------------- Logstash output -------------------------------- +#output.logstash: + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Optional SSL. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + +#================================ Processors ===================================== + +# Configure processors to enhance or manipulate events generated by the beat. + +processors: + - add_host_metadata: ~ + - add_cloud_metadata: ~ + +#================================ Logging ===================================== + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: debug + +# At debug level, you can selectively enable logging only for some components. +# To enable all selectors use ["*"]. Examples of other selectors are "beat", +# "publish", "service". +#logging.selectors: ["*"] + +#============================== Xpack Monitoring =============================== +# heartbeat can export internal metrics to a central Elasticsearch monitoring +# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The +# reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. Any setting that is not set is +# automatically inherited from the Elasticsearch output configuration, so if you +# have the Elasticsearch output configured, you can simply uncomment the +# following line. +#xpack.monitoring.elasticsearch: diff --git a/x-pack/heartbeat/magefile.go b/x-pack/heartbeat/magefile.go new file mode 100644 index 00000000000..97951a8bc2f --- /dev/null +++ b/x-pack/heartbeat/magefile.go @@ -0,0 +1,32 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// +build mage + +package main + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + heartbeat "github.com/elastic/beats/heartbeat/scripts/mage" +) + +func init() { + heartbeat.SelectLogic = mage.XPackProject +} + +// Update is an alias for update:all. 
This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(heartbeat.Update.All) } diff --git a/x-pack/heartbeat/make.bat b/x-pack/heartbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/x-pack/heartbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. + +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* From d4b55c765c8d9e26c5d2bb6efe2f501ddc6cc662 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:55:25 -0500 Subject: [PATCH 06/18] Refactor journalbeat build logic --- journalbeat/Dockerfile | 22 +- journalbeat/Makefile | 19 +- journalbeat/cmd/root.go | 3 + journalbeat/docker-compose.yml | 8 + journalbeat/include/fields.go | 2 +- journalbeat/magefile.go | 219 +--- journalbeat/make.bat | 11 + journalbeat/scripts/mage/build.go | 166 +++ journalbeat/scripts/mage/config.go | 44 + journalbeat/scripts/mage/fields.go | 81 ++ journalbeat/scripts/mage/package.go | 56 + journalbeat/scripts/mage/update.go | 78 ++ x-pack/journalbeat/Makefile | 4 + x-pack/journalbeat/journalbeat.docker.yml | 11 + x-pack/journalbeat/journalbeat.reference.yml | 1187 ++++++++++++++++++ x-pack/journalbeat/journalbeat.yml | 170 +++ x-pack/journalbeat/magefile.go | 32 + x-pack/journalbeat/main.go | 2 - x-pack/journalbeat/make.bat | 11 + 19 files changed, 1897 insertions(+), 229 deletions(-) create mode 100644 journalbeat/docker-compose.yml create mode 100644 journalbeat/make.bat create mode 100644 journalbeat/scripts/mage/build.go create mode 100644 journalbeat/scripts/mage/config.go create mode 100644 journalbeat/scripts/mage/fields.go create mode 100644 journalbeat/scripts/mage/package.go create mode 100644 journalbeat/scripts/mage/update.go create mode 100644 x-pack/journalbeat/Makefile create mode 100644 x-pack/journalbeat/journalbeat.docker.yml create mode 100644 x-pack/journalbeat/journalbeat.reference.yml create mode 100644 x-pack/journalbeat/journalbeat.yml create mode 100644 x-pack/journalbeat/magefile.go create mode 100644 x-pack/journalbeat/make.bat diff --git a/journalbeat/Dockerfile b/journalbeat/Dockerfile index 2e8248ca149..7ac2c9c39ca 100644 --- a/journalbeat/Dockerfile +++ b/journalbeat/Dockerfile @@ -1,17 +1,13 @@ FROM golang:1.11.4 -MAINTAINER Noémi Ványi -RUN set -x && \ - apt-get update && \ - apt-get install -y --no-install-recommends \ - python-pip virtualenv libsystemd-dev libc6-dev-i386 gcc-arm-linux-gnueabi && \ - apt-get clean +RUN \ + apt-get update \ + && apt-get install -y --no-install-recommends \ + python-pip \ + virtualenv \ + libsystemd-dev \ + && rm -rf /var/lib/apt/lists/* +RUN pip install --upgrade pip RUN pip install --upgrade setuptools - -# Setup work environment -ENV JOURNALBEAT_PATH /go/src/github.com/elastic/beats/journalbeat - -RUN mkdir -p $JOURNALBEAT_PATH/build/coverage -WORKDIR $JOURNALBEAT_PATH -HEALTHCHECK CMD exit 0 +RUN pip install --upgrade docker-compose==1.21.0 diff --git a/journalbeat/Makefile b/journalbeat/Makefile index 62bf3778d21..0326f3e977d 100644 --- a/journalbeat/Makefile +++ b/journalbeat/Makefile @@ -1,15 +1,4 @@ -BEAT_NAME=journalbeat -BEAT_TITLE=Journalbeat -SYSTEM_TESTS=false -TEST_ENVIRONMENT=false -ES_BEATS?=.. 
- -# Path to the libbeat Makefile --include $(ES_BEATS)/libbeat/scripts/Makefile - -.PHONY: before-build -before-build: - -# Collects all dependencies and then calls update -.PHONY: collect -collect: +# +# Includes +# +include ../dev-tools/make/oss.mk diff --git a/journalbeat/cmd/root.go b/journalbeat/cmd/root.go index d1afa4fdfcc..1cb19399553 100644 --- a/journalbeat/cmd/root.go +++ b/journalbeat/cmd/root.go @@ -21,6 +21,9 @@ import ( "github.com/elastic/beats/journalbeat/beater" cmd "github.com/elastic/beats/libbeat/cmd" + + // Register includes. + _ "github.com/elastic/beats/journalbeat/include" ) // Name of this beat diff --git a/journalbeat/docker-compose.yml b/journalbeat/docker-compose.yml new file mode 100644 index 00000000000..05a8146803e --- /dev/null +++ b/journalbeat/docker-compose.yml @@ -0,0 +1,8 @@ +version: '2.1' +services: + beat: + build: ${PWD}/. + working_dir: /go/src/github.com/elastic/beats/journalbeat + volumes: + - ${PWD}/..:/go/src/github.com/elastic/beats/ + command: make diff --git a/journalbeat/include/fields.go b/journalbeat/include/fields.go index ced5aa54f0f..2540fdca18c 100644 --- a/journalbeat/include/fields.go +++ b/journalbeat/include/fields.go @@ -24,7 +24,7 @@ import ( ) func init() { - if err := asset.SetFields("journalbeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("journalbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } diff --git a/journalbeat/magefile.go b/journalbeat/magefile.go index d8566ab9c74..673d7337088 100644 --- a/journalbeat/magefile.go +++ b/journalbeat/magefile.go @@ -20,209 +20,32 @@ package main import ( - "context" - "fmt" - "strings" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" - "github.com/pkg/errors" "github.com/elastic/beats/dev-tools/mage" -) -func init() { - mage.BeatDescription = "Journalbeat ships systemd journal entries to Elasticsearch or Logstash." - - mage.Platforms = mage.Platforms.Filter("linux !linux/ppc64 !linux/mips64") -} - -const ( - libsystemdDevPkgName = "libsystemd-dev" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + journalbeat "github.com/elastic/beats/journalbeat/scripts/mage" ) -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - if d, ok := deps[mage.Platform.Name]; ok { - mg.Deps(d) - } - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild(mage.ImageSelector(selectImage)) -} - -// CrossBuildXPack cross-builds the beat with XPack for all target platforms. 
-func CrossBuildXPack() error { - return mage.CrossBuildXPack(mage.ImageSelector(selectImage)) -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon(mage.ImageSelector(selectImage)) -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatPackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildXPack, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update updates the generated files (aka make update). -func Update() error { - return sh.Run("make", "update") -} - -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML() -} - -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} - -// ----------------------------------------------------------------------------- -// Customizations specific to Journalbeat. -// - Install required headers on builders for different architectures. 
- -var ( - deps = map[string]func() error{ - "linux/386": installLinux386, - "linux/amd64": installLinuxAMD64, - "linux/arm64": installLinuxARM64, - "linux/armv5": installLinuxARMLE, - "linux/armv6": installLinuxARMLE, - "linux/armv7": installLinuxARMHF, - "linux/mips": installLinuxMIPS, - "linux/mipsle": installLinuxMIPSLE, - "linux/mips64le": installLinuxMIPS64LE, - "linux/ppc64le": installLinuxPPC64LE, - "linux/s390x": installLinuxS390X, - - // No deb packages - //"linux/ppc64": installLinuxPpc64, - //"linux/mips64": installLinuxMips64, - } -) - -func installLinuxAMD64() error { - return installDependencies(libsystemdDevPkgName, "") -} - -func installLinuxARM64() error { - return installDependencies(libsystemdDevPkgName+":arm64", "arm64") -} - -func installLinuxARMHF() error { - return installDependencies(libsystemdDevPkgName+":armhf", "armhf") -} - -func installLinuxARMLE() error { - return installDependencies(libsystemdDevPkgName+":armel", "armel") -} - -func installLinux386() error { - return installDependencies(libsystemdDevPkgName+":i386", "i386") -} - -func installLinuxMIPS() error { - return installDependencies(libsystemdDevPkgName+":mips", "mips") -} - -func installLinuxMIPS64LE() error { - return installDependencies(libsystemdDevPkgName+":mips64el", "mips64el") -} - -func installLinuxMIPSLE() error { - return installDependencies(libsystemdDevPkgName+":mipsel", "mipsel") -} - -func installLinuxPPC64LE() error { - return installDependencies(libsystemdDevPkgName+":ppc64el", "ppc64el") -} - -func installLinuxS390X() error { - return installDependencies(libsystemdDevPkgName+":s390x", "s390x") -} - -func installDependencies(pkg, arch string) error { - if arch != "" { - err := sh.Run("dpkg", "--add-architecture", arch) - if err != nil { - return errors.Wrap(err, "error while adding architecture") - } - } - - if err := sh.Run("apt-get", "update"); err != nil { - return err - } - - return sh.Run("apt-get", "install", "-y", "--no-install-recommends", pkg) +func init() { + journalbeat.SelectLogic = mage.OSSProject } -func selectImage(platform string) (string, error) { - tagSuffix := "main" - - switch { - case strings.HasPrefix(platform, "linux/arm"): - tagSuffix = "arm" - case strings.HasPrefix(platform, "linux/mips"): - tagSuffix = "mips" - case strings.HasPrefix(platform, "linux/ppc"): - tagSuffix = "ppc" - case platform == "linux/s390x": - tagSuffix = "s390x" - case strings.HasPrefix(platform, "linux"): - tagSuffix = "main-debian8" - } - - goVersion, err := mage.GoVersion() - if err != nil { - return "", err - } - - return mage.BeatsCrossBuildImage + ":" + goVersion + "-" + tagSuffix, nil -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(journalbeat.Update.All) } diff --git a/journalbeat/make.bat b/journalbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/journalbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. + +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* diff --git a/journalbeat/scripts/mage/build.go b/journalbeat/scripts/mage/build.go new file mode 100644 index 00000000000..1295d38142a --- /dev/null +++ b/journalbeat/scripts/mage/build.go @@ -0,0 +1,166 @@ +// Licensed to Elasticsearch B.V. 
under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "strings" + + "github.com/magefile/mage/mg" + "github.com/magefile/mage/sh" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" +) + +// Build builds the Beat binary. +func Build() error { + return mage.Build(mage.DefaultBuildArgs()) +} + +// GolangCrossBuild build the Beat binary inside of the golang-builder. +// Do not use directly, use crossBuild instead. +func GolangCrossBuild() error { + mg.Deps(installCrossBuildDeps) + return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) +} + +// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). +func BuildGoDaemon() error { + return mage.BuildGoDaemon() +} + +// CrossBuild cross-builds the beat for all target platforms. +func CrossBuild() error { + return mage.CrossBuild(mage.ImageSelector(selectImage)) +} + +// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. +func CrossBuildGoDaemon() error { + return mage.CrossBuildGoDaemon() +} + +const ( + libsystemdDevPkgName = "libsystemd-dev" +) + +var ( + deps = map[string]func() error{ + "linux/386": installLinux386, + "linux/amd64": installLinuxAMD64, + "linux/arm64": installLinuxARM64, + "linux/armv5": installLinuxARMLE, + "linux/armv6": installLinuxARMLE, + "linux/armv7": installLinuxARMHF, + "linux/mips": installLinuxMIPS, + "linux/mipsle": installLinuxMIPSLE, + "linux/mips64le": installLinuxMIPS64LE, + "linux/ppc64le": installLinuxPPC64LE, + "linux/s390x": installLinuxS390X, + + // No deb packages available for these architectures. 
+ //"linux/ppc64": installLinuxPpc64, + //"linux/mips64": installLinuxMips64, + } +) + +func installCrossBuildDeps() { + if d, ok := deps[mage.Platform.Name]; ok { + mg.Deps(d) + } +} + +func installLinuxAMD64() error { + return installDependencies(libsystemdDevPkgName, "") +} + +func installLinuxARM64() error { + return installDependencies(libsystemdDevPkgName+":arm64", "arm64") +} + +func installLinuxARMHF() error { + return installDependencies(libsystemdDevPkgName+":armhf", "armhf") +} + +func installLinuxARMLE() error { + return installDependencies(libsystemdDevPkgName+":armel", "armel") +} + +func installLinux386() error { + return installDependencies(libsystemdDevPkgName+":i386", "i386") +} + +func installLinuxMIPS() error { + return installDependencies(libsystemdDevPkgName+":mips", "mips") +} + +func installLinuxMIPS64LE() error { + return installDependencies(libsystemdDevPkgName+":mips64el", "mips64el") +} + +func installLinuxMIPSLE() error { + return installDependencies(libsystemdDevPkgName+":mipsel", "mipsel") +} + +func installLinuxPPC64LE() error { + return installDependencies(libsystemdDevPkgName+":ppc64el", "ppc64el") +} + +func installLinuxS390X() error { + return installDependencies(libsystemdDevPkgName+":s390x", "s390x") +} + +func installDependencies(pkg, arch string) error { + if arch != "" { + err := sh.Run("dpkg", "--add-architecture", arch) + if err != nil { + return errors.Wrap(err, "error while adding architecture") + } + } + + if err := sh.Run("apt-get", "update"); err != nil { + return err + } + + return sh.Run("apt-get", "install", "-y", "--no-install-recommends", pkg) +} + +func selectImage(platform string) (string, error) { + tagSuffix := "main" + + switch { + case strings.HasPrefix(platform, "linux/arm"): + tagSuffix = "arm" + case strings.HasPrefix(platform, "linux/mips"): + tagSuffix = "mips" + case strings.HasPrefix(platform, "linux/ppc"): + tagSuffix = "ppc" + case platform == "linux/s390x": + tagSuffix = "s390x" + case strings.HasPrefix(platform, "linux"): + // This is the reason for the custom image selector. Use debian8 because + // it has a newer systemd version. + tagSuffix = "main-debian8" + } + + goVersion, err := mage.GoVersion() + if err != nil { + return "", err + } + + return mage.BeatsCrossBuildImage + ":" + goVersion + "-" + tagSuffix, nil +} diff --git a/journalbeat/scripts/mage/config.go b/journalbeat/scripts/mage/config.go new file mode 100644 index 00000000000..4bb85c08d52 --- /dev/null +++ b/journalbeat/scripts/mage/config.go @@ -0,0 +1,44 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/elastic/beats/dev-tools/mage" +) + +// config generates short/reference/docker configs. 
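+// The generated files are assembled from the Journalbeat-specific parts under
+// _meta together with the shared libbeat config templates (see configFileParams
+// below).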
+func config() error { + return mage.Config(mage.AllConfigTypes, configFileParams(), ".") +} + +func configFileParams() mage.ConfigFileParams { + return mage.ConfigFileParams{ + ShortParts: []string{ + mage.OSSBeatDir("_meta/beat.yml"), + mage.LibbeatDir("_meta/config.yml"), + }, + ReferenceParts: []string{ + mage.OSSBeatDir("_meta/beat.yml"), + mage.LibbeatDir("_meta/config.reference.yml"), + }, + DockerParts: []string{ + mage.OSSBeatDir("_meta/beat.docker.yml"), + mage.LibbeatDir("_meta/config.docker.yml"), + }, + } +} diff --git a/journalbeat/scripts/mage/fields.go b/journalbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..c1b57a1792f --- /dev/null +++ b/journalbeat/scripts/mage/fields.go @@ -0,0 +1,81 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + "go.uber.org/multierr" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch SelectLogic { + case mage.OSSProject: + return multierr.Combine( + b.commonFieldsGo(), + ) + case mage.XPackProject: + // No X-Pack content. + return nil + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject: + // No modules. + case mage.XPackProject: + // No modules. + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} diff --git a/journalbeat/scripts/mage/package.go b/journalbeat/scripts/mage/package.go new file mode 100644 index 00000000000..212909f39dc --- /dev/null +++ b/journalbeat/scripts/mage/package.go @@ -0,0 +1,56 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "fmt" + "time" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" +) + +func init() { + mage.BeatDescription = "Journalbeat ships systemd journal entries to Elasticsearch or Logstash." + + mage.Platforms = mage.Platforms.Filter("linux !linux/ppc64 !linux/mips64") +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() + + switch SelectLogic { + case mage.OSSProject: + mage.UseElasticBeatOSSPackaging() + case mage.XPackProject: + mage.UseElasticBeatXPackPackaging() + } + mage.PackageKibanaDashboardsFromBuildDir() + + mg.Deps(Update.All) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} diff --git a/journalbeat/scripts/mage/update.go b/journalbeat/scripts/mage/update.go new file mode 100644 index 00000000000..b28249c2af7 --- /dev/null +++ b/journalbeat/scripts/mage/update.go @@ -0,0 +1,78 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + "github.com/elastic/beats/dev-tools/mage/target/integtest" + "github.com/elastic/beats/dev-tools/mage/target/unittest" +) + +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs) + + unittest.RegisterGoTestDeps(Update.Fields) + unittest.RegisterPythonTestDeps(Update.Fields) + + integtest.RegisterPythonTestDeps(Update.Fields, Update.Dashboards) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. 
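+// It runs the Fields, Dashboards, Config, and FieldDocs targets.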
+func (Update) All() { + mg.Deps(Update.Fields, Update.Dashboards, Update.Config, Update.FieldDocs) +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. +func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + return mage.KibanaDashboards() +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// FieldDocs updates the field documentation. +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} diff --git a/x-pack/journalbeat/Makefile b/x-pack/journalbeat/Makefile new file mode 100644 index 00000000000..7427a5c672b --- /dev/null +++ b/x-pack/journalbeat/Makefile @@ -0,0 +1,4 @@ +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/journalbeat/journalbeat.docker.yml b/x-pack/journalbeat/journalbeat.docker.yml new file mode 100644 index 00000000000..a1e67c0d961 --- /dev/null +++ b/x-pack/journalbeat/journalbeat.docker.yml @@ -0,0 +1,11 @@ +journalbeat.inputs: +- paths: [] + seek: cursor + +processors: +- add_cloud_metadata: ~ + +output.elasticsearch: + hosts: '${ELASTICSEARCH_HOSTS:elasticsearch:9200}' + username: '${ELASTICSEARCH_USERNAME:}' + password: '${ELASTICSEARCH_PASSWORD:}' diff --git a/x-pack/journalbeat/journalbeat.reference.yml b/x-pack/journalbeat/journalbeat.reference.yml new file mode 100644 index 00000000000..b53113f516e --- /dev/null +++ b/x-pack/journalbeat/journalbeat.reference.yml @@ -0,0 +1,1187 @@ +###################### Journalbeat Configuration Example ######################### + +# This file is an example configuration file highlighting only the most common +# options. The journalbeat.reference.yml file from the same directory contains all the +# supported options with more comments. You can use it as a reference. +# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/journalbeat/index.html + +# For more available modules and options, please see the journalbeat.reference.yml sample +# configuration file. + +#=========================== Journalbeat inputs ============================= + +journalbeat.inputs: + # Paths that should be crawled and fetched. Possible values files and directories. + # When setting a directory, all journals under it are merged. + # When empty starts to read from local journal. +- paths: [] + + # The number of seconds to wait before trying to read again from journals. + #backoff: 1s + # The maximum number of seconds to wait before attempting to read again from journals. + #max_backoff: 20s + + # Position to start reading from journal. Valid values: head, tail, cursor + seek: cursor + # Fallback position if no cursor data is available. + #cursor_seek_fallback: head + + # Exact matching for field values of events. + # Matching for nginx entries: "systemd.unit=nginx" + #include_matches: [] + + # Optional fields that you can specify to add additional information to the + # output. Fields can be scalar values, arrays, dictionaries, or any nested + # combination of these. + #fields: + # env: staging + + +#========================= Journalbeat global options ============================ +#journalbeat: + # Name of the registry file. If a relative path is used, it is considered relative to the + # data path. 
+ #registry_file: registry + +#================================ General ====================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +# If this options is not defined, the hostname is used. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. Tags make it easy to group servers by different +# logical properties. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. Fields can be scalar values, arrays, dictionaries, or any nested +# combination of these. +#fields: +# env: staging + +# If this option is set to true, the custom fields are stored as top-level +# fields in the output document instead of being grouped under a fields +# sub-dictionary. Default is false. +#fields_under_root: false + +# Internal queue configuration for buffering events to be published. +#queue: + # Queue type by name (default 'mem') + # The memory queue will present all available events (up to the outputs + # bulk_max_size) to the output, the moment the output is ready to server + # another batch of events. + #mem: + # Max number of events the queue can buffer. + #events: 4096 + + # Hints the minimum number of events stored in the queue, + # before providing a batch of events to the outputs. + # The default value is set to 2048. + # A value of 0 ensures events are immediately available + # to be sent to the outputs. + #flush.min_events: 2048 + + # Maximum duration after which events are available to the outputs, + # if the number of events stored in the queue is < min_flush_events. + #flush.timeout: 1s + + # The spool queue will store events in a local spool file, before + # forwarding the events to the outputs. + # + # Beta: spooling to disk is currently a beta feature. Use with care. + # + # The spool file is a circular buffer, which blocks once the file/buffer is full. + # Events are put into a write buffer and flushed once the write buffer + # is full or the flush_timeout is triggered. + # Once ACKed by the output, events are removed immediately from the queue, + # making space for new events to be persisted. + #spool: + # The file namespace configures the file path and the file creation settings. + # Once the file exists, the `size`, `page_size` and `prealloc` settings + # will have no more effect. + #file: + # Location of spool file. The default value is ${path.data}/spool.dat. + #path: "${path.data}/spool.dat" + + # Configure file permissions if file is created. The default value is 0600. + #permissions: 0600 + + # File size hint. The spool blocks, once this limit is reached. The default value is 100 MiB. + #size: 100MiB + + # The files page size. A file is split into multiple pages of the same size. The default value is 4KiB. + #page_size: 4KiB + + # If prealloc is set, the required space for the file is reserved using + # truncate. The default value is true. + #prealloc: true + + # Spool writer settings + # Events are serialized into a write buffer. The write buffer is flushed if: + # - The buffer limit has been reached. + # - The configured limit of buffered events is reached. + # - The flush timeout is triggered. + #write: + # Sets the write buffer size. + #buffer_size: 1MiB + + # Maximum duration after which events are flushed, if the write buffer + # is not full yet. The default value is 1s. 
+ #flush.timeout: 1s + + # Number of maximum buffered events. The write buffer is flushed once the + # limit is reached. + #flush.events: 16384 + + # Configure the on-disk event encoding. The encoding can be changed + # between restarts. + # Valid encodings are: json, ubjson, and cbor. + #codec: cbor + #read: + # Reader flush timeout, waiting for more events to become available, so + # to fill a complete batch, as required by the outputs. + # If flush_timeout is 0, all available events are forwarded to the + # outputs immediately. + # The default value is 0s. + #flush.timeout: 0s + +# Sets the maximum number of CPUs that can be executing simultaneously. The +# default is the number of logical CPUs available in the system. +#max_procs: + +#================================ Processors =================================== + +# Processors are used to reduce the number of fields in the exported event or to +# enhance the event with external metadata. This section defines a list of +# processors that are applied one by one and the first one receives the initial +# event: +# +# event -> filter1 -> event1 -> filter2 ->event2 ... +# +# The supported processors are drop_fields, drop_event, include_fields, +# decode_json_fields, and add_cloud_metadata. +# +# For example, you can use the following processors to keep the fields that +# contain CPU load percentages, but remove the fields that contain CPU ticks +# values: +# +#processors: +#- include_fields: +# fields: ["cpu"] +#- drop_fields: +# fields: ["cpu.user", "cpu.system"] +# +# The following example drops the events that have the HTTP response code 200: +# +#processors: +#- drop_event: +# when: +# equals: +# http.code: 200 +# +# The following example renames the field a to b: +# +#processors: +#- rename: +# fields: +# - from: "a" +# to: "b" +# +# The following example tokenizes the string into fields: +# +#processors: +#- dissect: +# tokenizer: "%{key1} - %{key2}" +# field: "message" +# target_prefix: "dissect" +# +# The following example enriches each event with metadata from the cloud +# provider about the host machine. It works on EC2, GCE, DigitalOcean, +# Tencent Cloud, and Alibaba Cloud. +# +#processors: +#- add_cloud_metadata: ~ +# +# The following example enriches each event with the machine's local time zone +# offset from UTC. +# +#processors: +#- add_locale: +# format: offset +# +# The following example enriches each event with docker metadata, it matches +# given fields to an existing container id and adds info from that container: +# +#processors: +#- add_docker_metadata: +# host: "unix:///var/run/docker.sock" +# match_fields: ["system.process.cgroup.id"] +# match_pids: ["process.pid", "process.ppid"] +# match_source: true +# match_source_index: 4 +# match_short_id: false +# cleanup_timeout: 60 +# labels.dedot: false +# # To connect to Docker over TLS you must specify a client and CA certificate. +# #ssl: +# # certificate_authority: "/etc/pki/root/ca.pem" +# # certificate: "/etc/pki/client/cert.pem" +# # key: "/etc/pki/client/cert.key" +# +# The following example enriches each event with docker metadata, it matches +# container id from log path available in `source` field (by default it expects +# it to be /var/lib/docker/containers/*/*.log). +# +#processors: +#- add_docker_metadata: ~ +# +# The following example enriches each event with host metadata. +# +#processors: +#- add_host_metadata: +# netinfo.enabled: false +# +# The following example enriches each event with process metadata using +# process IDs included in the event. 
+# +#processors: +#- add_process_metadata: +# match_pids: ["system.process.ppid"] +# target: system.process.parent +# +# The following example decodes fields containing JSON strings +# and replaces the strings with valid JSON objects. +# +#processors: +#- decode_json_fields: +# fields: ["field1", "field2", ...] +# process_array: false +# max_depth: 1 +# target: "" +# overwrite_keys: false + +#============================= Elastic Cloud ================================== + +# These settings simplify using journalbeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. +#cloud.auth: + +#================================ Outputs ====================================== + +# Configure what output to use when sending the data collected by the beat. + +#-------------------------- Elasticsearch output ------------------------------- +output.elasticsearch: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Array of hosts to connect to. + # Scheme and port can be left out and will be set to the default (http and 9200) + # In case you specify and additional path, the scheme is required: http://localhost:9200/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200 + hosts: ["localhost:9200"] + + # Enabled ilm (beta) to use index lifecycle management instead daily indices. + #ilm.enabled: false + #ilm.rollover_alias: "journalbeat" + #ilm.pattern: "{now/d}-000001" + + # Set gzip compression level. + #compression_level: 0 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Dictionary of HTTP parameters to pass within the url with index operations. + #parameters: + #param1: value1 + #param2: value2 + + # Number of workers per Elasticsearch host. + #worker: 1 + + # Optional index name. The default is "journalbeat" plus date + # and generates [journalbeat-]YYYY.MM.DD keys. + # In case you modify this pattern you must update setup.template.name and setup.template.pattern accordingly. + #index: "journalbeat-%{[beat.version]}-%{+yyyy.MM.dd}" + + # Optional ingest node pipeline. By default no pipeline will be used. + #pipeline: "" + + # Optional HTTP Path + #path: "/elasticsearch" + + # Custom HTTP headers to add to each request + #headers: + # X-My-Header: Contents of the header + + # Proxy server url + #proxy_url: http://proxy:3128 + + # The number of times a particular Elasticsearch index operation is attempted. If + # the indexing operation doesn't succeed after this many retries, the events are + # dropped. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Elasticsearch bulk API index request. + # The default is 50. + #bulk_max_size: 50 + + # The number of seconds to wait before trying to reconnect to Elasticsearch + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. 
+ #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Elasticsearch after a network error. The default is 60s. + #backoff.max: 60s + + # Configure http request timeout before failing a request to Elasticsearch. + #timeout: 90 + + # Use SSL settings for HTTPS. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + +#----------------------------- Logstash output --------------------------------- +#output.logstash: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Number of workers per Logstash host. + #worker: 1 + + # Set gzip compression level. + #compression_level: 3 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional maximum time to live for a connection to Logstash, after which the + # connection will be re-established. A value of `0s` (the default) will + # disable this feature. + # + # Not yet supported for async connections (i.e. with the "pipelining" option set) + #ttl: 30s + + # Optional load balance the events between the Logstash hosts. Default is false. + #loadbalance: false + + # Number of batches to be sent asynchronously to Logstash while processing + # new batches. + #pipelining: 2 + + # If enabled only a subset of events in a batch of events is transferred per + # transaction. The number of events to be sent increases up to `bulk_max_size` + # if no error is encountered. + #slow_start: false + + # The number of seconds to wait before trying to reconnect to Logstash + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Logstash after a network error. The default is 60s. + #backoff.max: 60s + + # Optional index name. The default index name is set to journalbeat + # in all lowercase. + #index: 'journalbeat' + + # SOCKS5 proxy server URL + #proxy_url: socks5://user:password@socks5-server:2233 + + # Resolve names locally when using a proxy server. Defaults to false. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. 
+ #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat and Winlogbeat, ignore the max_retries setting + # and retry until all events are published. Set max_retries to a value less + # than 0 to retry until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Logstash request. The + # default is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Logstash server before + # timing out. The default is 30s. + #timeout: 30s + +#------------------------------- Kafka output ---------------------------------- +#output.kafka: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The list of Kafka broker addresses from where to fetch the cluster metadata. + # The cluster metadata contain the actual Kafka brokers events are published + # to. + #hosts: ["localhost:9092"] + + # The Kafka topic used for produced events. The setting can be a format string + # using any event field. To set the topic from document type use `%{[type]}`. + #topic: beats + + # The Kafka event key setting. Use format string to create unique event key. + # By default no event key will be generated. + #key: '' + + # The Kafka event partitioning strategy. Default hashing strategy is `hash` + # using the `output.kafka.key` setting or randomly distributes events if + # `output.kafka.key` is not configured. + #partition.hash: + # If enabled, events will only be published to partitions with reachable + # leaders. Default is false. + #reachable_only: false + + # Configure alternative event field names used to compute the hash value. + # If empty `output.kafka.key` setting will be used. + # Default value is empty list. + #hash: [] + + # Authentication details. Password is required if username is set. + #username: '' + #password: '' + + # Kafka version journalbeat is assumed to run against. Defaults to the "1.0.0". + #version: '1.0.0' + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Metadata update configuration. 
Metadata do contain leader information + # deciding which broker to use when publishing. + #metadata: + # Max metadata request retry attempts when cluster is in middle of leader + # election. Defaults to 3 retries. + #retry.max: 3 + + # Waiting time between retries during leader elections. Default is 250ms. + #retry.backoff: 250ms + + # Refresh metadata interval. Defaults to every 10 minutes. + #refresh_frequency: 10m + + # The number of concurrent load-balanced Kafka output workers. + #worker: 1 + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Kafka request. The default + # is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Kafka brokers before + # timing out. The default is 30s. + #timeout: 30s + + # The maximum duration a broker will wait for number of required ACKs. The + # default is 10s. + #broker_timeout: 10s + + # The number of messages buffered for each Kafka broker. The default is 256. + #channel_buffer_size: 256 + + # The keep-alive period for an active network connection. If 0s, keep-alives + # are disabled. The default is 0 seconds. + #keep_alive: 0 + + # Sets the output compression codec. Must be one of none, snappy and gzip. The + # default is gzip. + #compression: gzip + + # Set the compression level. Currently only gzip provides a compression level + # between 0 and 9. The default value is chosen by the compression algorithm. + #compression_level: 4 + + # The maximum permitted size of JSON-encoded messages. Bigger messages will be + # dropped. The default value is 1000000 (bytes). This value should be equal to + # or less than the broker's message.max.bytes. + #max_message_bytes: 1000000 + + # The ACK reliability level required from broker. 0=no response, 1=wait for + # local commit, -1=wait for all replicas to commit. The default is 1. Note: + # If set to 0, no ACKs are returned by Kafka. Messages might be lost silently + # on error. + #required_acks: 1 + + # The configurable ClientID used for logging, debugging, and auditing + # purposes. The default is "beats". + #client_id: beats + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. 
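A sketch of the Kafka output drawn from the options above; the broker address, topic, and sizing values are the sample values and documented defaults from this section, not tuning advice.

    output.kafka:
      hosts: ["localhost:9092"]
      topic: beats                 # or a format string such as '%{[type]}'
      required_acks: 1             # wait for the local commit only
      compression: gzip
      max_message_bytes: 1000000   # keep at or below the broker's message.max.bytes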
+ #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- Redis output ---------------------------------- +#output.redis: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # The list of Redis servers to connect to. If load balancing is enabled, the + # events are distributed to the servers in the list. If one server becomes + # unreachable, the events are distributed to the reachable servers only. + #hosts: ["localhost:6379"] + + # The Redis port to use if hosts does not contain a port number. The default + # is 6379. + #port: 6379 + + # The name of the Redis list or channel the events are published to. The + # default is journalbeat. + #key: journalbeat + + # The password to authenticate with. The default is no authentication. + #password: + + # The Redis database number where the events are published. The default is 0. + #db: 0 + + # The Redis data type to use for publishing events. If the data type is list, + # the Redis RPUSH command is used. If the data type is channel, the Redis + # PUBLISH command is used. The default value is list. + #datatype: list + + # The number of workers to use for each host configured to publish events to + # Redis. Use this setting along with the loadbalance option. For example, if + # you have 2 hosts and 3 workers, in total 6 workers are started (3 for each + # host). + #worker: 1 + + # If set to true and multiple hosts or workers are configured, the output + # plugin load balances published events onto all Redis hosts. If set to false, + # the output plugin sends all events to only one host (determined at random) + # and will switch to another host if the currently selected one becomes + # unreachable. The default value is true. + #loadbalance: true + + # The Redis connection timeout in seconds. The default is 5 seconds. + #timeout: 5s + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The number of seconds to wait before trying to reconnect to Redis + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Redis after a network error. The default is 60s. + #backoff.max: 60s + + # The maximum number of events to bulk in a single Redis request or pipeline. + # The default is 2048. + #bulk_max_size: 2048 + + # The URL of the SOCKS5 proxy to use when connecting to the Redis servers. The + # value must be a URL with a scheme of socks5://. 
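Similarly, a sketch of the Redis output built from the options above; the host, key, and timeout are the sample values and defaults listed in this section.

    output.redis:
      hosts: ["localhost:6379"]
      key: journalbeat      # list name; RPUSH is used for the list datatype
      db: 0
      datatype: list        # set to 'channel' to PUBLISH instead
      timeout: 5s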
+ #proxy_url: + + # This option determines whether Redis hostnames are resolved locally when + # using a proxy. The default value is false, which means that name resolution + # occurs on the proxy server. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- File output ----------------------------------- +#output.file: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Path to the directory where to save the generated files. The option is + # mandatory. + #path: "/tmp/journalbeat" + + # Name of the generated files. The default is `journalbeat` and it generates + # files: `journalbeat`, `journalbeat.1`, `journalbeat.2`, etc. + #filename: journalbeat + + # Maximum size in kilobytes of each file. When this size is reached, and on + # every journalbeat restart, the files are rotated. The default value is 10240 + # kB. + #rotate_every_kb: 10000 + + # Maximum number of files under path. When this number of files is reached, + # the oldest file is deleted and the rest are shifted from last to first. The + # default is 7 files. + #number_of_files: 7 + + # Permissions to use for file creation. The default is 0600. + #permissions: 0600 + + +#----------------------------- Console output --------------------------------- +#output.console: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + +#================================= Paths ====================================== + +# The home path for the journalbeat installation. This is the default base path +# for all other path settings and for miscellaneous files that come with the +# distribution (for example, the sample dashboards). +# If not set by a CLI flag or in the configuration file, the default for the +# home path is the location of the binary. +#path.home: + +# The configuration path for the journalbeat installation. 
This is the default +# base path for configuration files, including the main YAML configuration file +# and the Elasticsearch template file. If not set by a CLI flag or in the +# configuration file, the default for the configuration path is the home path. +#path.config: ${path.home} + +# The data path for the journalbeat installation. This is the default base path +# for all the files in which journalbeat needs to store its data. If not set by a +# CLI flag or in the configuration file, the default for the data path is a data +# subdirectory inside the home path. +#path.data: ${path.home}/data + +# The logs path for a journalbeat installation. This is the default location for +# the Beat's log files. If not set by a CLI flag or in the configuration file, +# the default for the logs path is a logs subdirectory inside the home path. +#path.logs: ${path.home}/logs + +#================================ Keystore ========================================== +# Location of the Keystore containing the keys and their sensitive values. +#keystore.path: "${path.config}/beats.keystore" + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards are disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The directory from where to read the dashboards. The default is the `kibana` +# folder in the home path. +#setup.dashboards.directory: ${path.home}/kibana + +# The URL from where to download the dashboards archive. It is used instead of +# the directory if it has a value. +#setup.dashboards.url: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the directory when it has a value. +#setup.dashboards.file: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#setup.dashboards.beat: journalbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#setup.dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#setup.dashboards.index: + +# Always use the Kibana API for loading the dashboards instead of autodetecting +# how to install the dashboards by first querying Elasticsearch. +#setup.dashboards.always_kibana: false + +# If true and Kibana is not reachable at the time when dashboards are loaded, +# it will retry to reconnect to Kibana instead of exiting with an error. +#setup.dashboards.retry.enabled: false + +# Duration interval between Kibana connection retries. +#setup.dashboards.retry.interval: 1s + +# Maximum number of retries before exiting with an error, 0 for unlimited retrying. +#setup.dashboards.retry.maximum: 0 + + +#============================== Template ===================================== + +# A template is used to set the mapping in Elasticsearch +# By default template loading is enabled and the template is loaded. +# These settings can be adjusted to load your own template or overwrite existing ones. + +# Set to false to disable template loading. +#setup.template.enabled: true + +# Template name. 
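To make the dashboard-loading options above concrete, a sketch that enables loading and retries until Kibana is reachable; the interval and maximum are the defaults documented above.

    setup.dashboards.enabled: true
    setup.dashboards.retry.enabled: true
    setup.dashboards.retry.interval: 1s
    setup.dashboards.retry.maximum: 0   # 0 = retry without limit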
By default the template name is "journalbeat-%{[beat.version]}" +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.name: "journalbeat-%{[beat.version]}" + +# Template pattern. By default the template pattern is "-%{[beat.version]}-*" to apply to the default index settings. +# The first part is the version of the beat and then -* is used to match all daily indices. +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.pattern: "journalbeat-%{[beat.version]}-*" + +# Path to fields.yml file to generate the template +#setup.template.fields: "${path.config}/fields.yml" + +# A list of fields to be added to the template and Kibana index pattern. Also +# specify setup.template.overwrite: true to overwrite the existing template. +# This setting is experimental. +#setup.template.append_fields: +#- name: field_name +# type: field_type + +# Enable json template loading. If this is enabled, the fields.yml is ignored. +#setup.template.json.enabled: false + +# Path to the json template file +#setup.template.json.path: "${path.config}/template.json" + +# Name under which the template is stored in Elasticsearch +#setup.template.json.name: "" + +# Overwrite existing template +#setup.template.overwrite: false + +# Elasticsearch template settings +setup.template.settings: + + # A dictionary of settings to place into the settings.index dictionary + # of the Elasticsearch template. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html + #index: + #number_of_shards: 1 + #codec: best_compression + #number_of_routing_shards: 30 + + # A dictionary of settings for the _source field. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html + #_source: + #enabled: false + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Optional HTTP Path + #path: "" + + # Use SSL settings for HTTPS. Default is true. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. 
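As the comments above note, changing the Elasticsearch index pattern requires keeping setup.template.name and setup.template.pattern in step. A sketch, where the "custom" prefix is purely a placeholder:

    output.elasticsearch.index: "custom-%{[beat.version]}-%{+yyyy.MM.dd}"
    setup.template.name: "custom-%{[beat.version]}"
    setup.template.pattern: "custom-%{[beat.version]}-*"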
+ #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + + +#================================ Logging ====================================== +# There are four options for the log output: file, stderr, syslog, eventlog +# The file output is the default. + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: info + +# Enable debug output for selected components. To enable all selectors use ["*"] +# Other available selectors are "beat", "publish", "service" +# Multiple selectors can be chained. +#logging.selectors: [ ] + +# Send all logging output to syslog. The default is false. +#logging.to_syslog: false + +# Send all logging output to Windows Event Logs. The default is false. +#logging.to_eventlog: false + +# If enabled, journalbeat periodically logs its internal metrics that have changed +# in the last period. For each metric that changed, the delta from the value at +# the beginning of the period is logged. Also, the total values for +# all non-zero internal metrics are logged on shutdown. The default is true. +#logging.metrics.enabled: true + +# The period after which to log the internal metrics. The default is 30s. +#logging.metrics.period: 30s + +# Logging to rotating files. Set logging.to_files to false to disable logging to +# files. +logging.to_files: true +logging.files: + # Configure the path where the logs are written. The default is the logs directory + # under the home path (the binary location). + #path: /var/log/journalbeat + + # The name of the files where the logs are written to. + #name: journalbeat + + # Configure log file size limit. If limit is reached, log file will be + # automatically rotated + #rotateeverybytes: 10485760 # = 10MB + + # Number of rotated log files to keep. Oldest files will be deleted first. + #keepfiles: 7 + + # The permissions mask to apply when rotating log files. The default value is 0600. + # Must be a valid Unix-style file permissions mask expressed in octal notation. + #permissions: 0600 + + # Enable log file rotation on time intervals in addition to size-based rotation. + # Intervals must be at least 1s. Values of 1m, 1h, 24h, 7*24h, 30*24h, and 365*24h + # are boundary-aligned with minutes, hours, days, weeks, months, and years as + # reported by the local system clock. All other intervals are calculated from the + # unix epoch. Defaults to disabled. + #interval: 0 + +# Set to true to log messages in json format. +#logging.json: false + + +#============================== Xpack Monitoring ===================================== +# journalbeat can export internal metrics to a central Elasticsearch monitoring cluster. +# This requires xpack monitoring to be enabled in Elasticsearch. +# The reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. Any setting that is not set is +# automatically inherited from the Elasticsearch output configuration, so if you +# have the Elasticsearch output configured, you can simply uncomment the +# following line, and leave the rest commented out. +#xpack.monitoring.elasticsearch: + + # Array of hosts to connect to. 
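Pulling the logging options above together, a sketch of verbose file logging with a couple of selectors enabled; the path and rotation values are the defaults shown in this section.

    logging.level: debug
    logging.selectors: ["publish", "service"]   # use ["*"] for everything
    logging.to_files: true
    logging.files:
      path: /var/log/journalbeat
      name: journalbeat
      keepfiles: 7
      permissions: 0600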
+ # Scheme and port can be left out and will be set to the default (http and 9200) + # In case you specify and additional path, the scheme is required: http://localhost:9200/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200 + #hosts: ["localhost:9200"] + + # Set gzip compression level. + #compression_level: 0 + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "beats_system" + #password: "changeme" + + # Dictionary of HTTP parameters to pass within the url with index operations. + #parameters: + #param1: value1 + #param2: value2 + + # Custom HTTP headers to add to each request + #headers: + # X-My-Header: Contents of the header + + # Proxy server url + #proxy_url: http://proxy:3128 + + # The number of times a particular Elasticsearch index operation is attempted. If + # the indexing operation doesn't succeed after this many retries, the events are + # dropped. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Elasticsearch bulk API index request. + # The default is 50. + #bulk_max_size: 50 + + # The number of seconds to wait before trying to reconnect to Elasticsearch + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Elasticsearch after a network error. The default is 60s. + #backoff.max: 60s + + # Configure http request timeout before failing an request to Elasticsearch. + #timeout: 90 + + # Use SSL settings for HTTPS. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + #metrics.period: 10s + #state.period: 1m + +#================================ HTTP Endpoint ====================================== +# Each beat can expose internal metrics through a HTTP endpoint. For security +# reasons the endpoint is disabled by default. This feature is currently experimental. +# Stats can be access through http://localhost:5066/stats . For pretty JSON output +# append ?pretty to the URL. + +# Defines if the HTTP endpoint is enabled. +#http.enabled: false + +# The HTTP endpoint will bind to this hostname or IP address. It is recommended to use only localhost. 
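A sketch of enabling the experimental stats endpoint described above, bound to localhost as recommended; once running, the metrics are served at http://localhost:5066/stats (append ?pretty for readable JSON).

    http.enabled: true
    http.host: localhost
    http.port: 5066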
+#http.host: localhost + +# Port on which the HTTP endpoint will bind. Default is 5066. +#http.port: 5066 + +#============================= Process Security ================================ + +# Enable or disable seccomp system call filtering on Linux. Default is enabled. +#seccomp.enabled: true diff --git a/x-pack/journalbeat/journalbeat.yml b/x-pack/journalbeat/journalbeat.yml new file mode 100644 index 00000000000..a12f6c28887 --- /dev/null +++ b/x-pack/journalbeat/journalbeat.yml @@ -0,0 +1,170 @@ +###################### Journalbeat Configuration Example ######################### + +# This file is an example configuration file highlighting only the most common +# options. The journalbeat.reference.yml file from the same directory contains all the +# supported options with more comments. You can use it as a reference. +# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/journalbeat/index.html + +# For more available modules and options, please see the journalbeat.reference.yml sample +# configuration file. + +#=========================== Journalbeat inputs ============================= + +journalbeat.inputs: + # Paths that should be crawled and fetched. Possible values files and directories. + # When setting a directory, all journals under it are merged. + # When empty starts to read from local journal. +- paths: [] + + # The number of seconds to wait before trying to read again from journals. + #backoff: 1s + # The maximum number of seconds to wait before attempting to read again from journals. + #max_backoff: 20s + + # Position to start reading from journal. Valid values: head, tail, cursor + seek: cursor + # Fallback position if no cursor data is available. + #cursor_seek_fallback: head + + # Exact matching for field values of events. + # Matching for nginx entries: "systemd.unit=nginx" + #include_matches: [] + + # Optional fields that you can specify to add additional information to the + # output. Fields can be scalar values, arrays, dictionaries, or any nested + # combination of these. + #fields: + # env: staging + + +#========================= Journalbeat global options ============================ +#journalbeat: + # Name of the registry file. If a relative path is used, it is considered relative to the + # data path. + #registry_file: registry + +#================================ General ===================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. +#fields: +# env: staging + + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. 
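To make the input options above concrete, a sketch of an input that reads from an explicit journal directory and keeps only nginx entries, following the include_matches hint in the comments; the journal path is a placeholder.

    journalbeat.inputs:
    - paths: ["/var/log/journal"]   # placeholder; all journals under a directory are merged
      seek: cursor
      cursor_seek_fallback: head
      include_matches: ["systemd.unit=nginx"]
      fields:
        env: staging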
+#setup.dashboards.url: + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Kibana Space ID + # ID of the Kibana Space into which the dashboards should be loaded. By default, + # the Default Space will be used. + #space.id: + +#============================= Elastic Cloud ================================== + +# These settings simplify using journalbeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. +#cloud.auth: + +#================================ Outputs ===================================== + +# Configure what output to use when sending the data collected by the beat. + +#-------------------------- Elasticsearch output ------------------------------ +output.elasticsearch: + # Array of hosts to connect to. + hosts: ["localhost:9200"] + + # Enabled ilm (beta) to use index lifecycle management instead daily indices. + #ilm.enabled: false + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + +#----------------------------- Logstash output -------------------------------- +#output.logstash: + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Optional SSL. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + +#================================ Processors ===================================== + +# Configure processors to enhance or manipulate events generated by the beat. + +processors: + - add_host_metadata: ~ + - add_cloud_metadata: ~ + +#================================ Logging ===================================== + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: debug + +# At debug level, you can selectively enable logging only for some components. +# To enable all selectors use ["*"]. Examples of other selectors are "beat", +# "publish", "service". +#logging.selectors: ["*"] + +#============================== Xpack Monitoring =============================== +# journalbeat can export internal metrics to a central Elasticsearch monitoring +# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The +# reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. 
Any setting that is not set is +# automatically inherited from the Elasticsearch output configuration, so if you +# have the Elasticsearch output configured, you can simply uncomment the +# following line. +#xpack.monitoring.elasticsearch: diff --git a/x-pack/journalbeat/magefile.go b/x-pack/journalbeat/magefile.go new file mode 100644 index 00000000000..d88e6dfe27b --- /dev/null +++ b/x-pack/journalbeat/magefile.go @@ -0,0 +1,32 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// +build mage + +package main + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + journalbeat "github.com/elastic/beats/journalbeat/scripts/mage" +) + +func init() { + journalbeat.SelectLogic = mage.XPackProject + + mage.BeatLicense = "Elastic License" +} + +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(journalbeat.Update.All) } diff --git a/x-pack/journalbeat/main.go b/x-pack/journalbeat/main.go index f80646bfc49..bd641542e7b 100644 --- a/x-pack/journalbeat/main.go +++ b/x-pack/journalbeat/main.go @@ -8,8 +8,6 @@ import ( "os" "github.com/elastic/beats/x-pack/journalbeat/cmd" - - _ "github.com/elastic/beats/journalbeat/include" ) func main() { diff --git a/x-pack/journalbeat/make.bat b/x-pack/journalbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/x-pack/journalbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. 
+ +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* From 7dbddcaeec922f44a9eb2364fb2bf69156c8a1b7 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:56:03 -0500 Subject: [PATCH 07/18] Refactor libbeat build logic --- libbeat/Dockerfile | 28 ++++++------- libbeat/Makefile | 13 ++---- libbeat/docker-compose.yml | 6 +-- libbeat/magefile.go | 48 ++++++++++----------- libbeat/scripts/mage/fields.go | 60 +++++++++++++++++++++++++++ libbeat/scripts/mage/update.go | 51 +++++++++++++++++++++++ libbeat/tests/system/keystore.py | 14 +++---- libbeat/tests/system/requirements.txt | 1 + x-pack/libbeat/Dockerfile | 24 ++++++----- x-pack/libbeat/Makefile | 15 ++----- x-pack/libbeat/docker-compose.yml | 2 +- x-pack/libbeat/magefile.go | 40 ++++++++---------- x-pack/libbeat/make.bat | 11 +++++ 13 files changed, 206 insertions(+), 107 deletions(-) create mode 100644 libbeat/scripts/mage/fields.go create mode 100644 libbeat/scripts/mage/update.go create mode 100644 x-pack/libbeat/make.bat diff --git a/libbeat/Dockerfile b/libbeat/Dockerfile index 8d39cf584b8..3667155b10c 100644 --- a/libbeat/Dockerfile +++ b/libbeat/Dockerfile @@ -1,21 +1,17 @@ -# Beats dockerfile used for testing FROM golang:1.11.4 -MAINTAINER Nicolas Ruflin -RUN set -x && \ - apt-get update && \ - apt-get install -y --no-install-recommends \ - netcat python-pip virtualenv libpcap-dev && \ - apt-get clean +RUN \ + apt-get update \ + && apt-get install -y --no-install-recommends \ + netcat \ + python-pip \ + virtualenv \ + libpcap-dev \ + && rm -rf /var/lib/apt/lists/* -ENV PYTHON_ENV=/tmp/python-env +RUN pip install --upgrade pip +RUN pip install --upgrade setuptools +RUN pip install --upgrade docker-compose==1.21.0 -RUN test -d ${PYTHON_ENV} || virtualenv ${PYTHON_ENV} -COPY ./tests/system/requirements.txt /tmp/requirements.txt - -# Upgrade pip to make sure to have the most recent version -RUN . ${PYTHON_ENV}/bin/activate && pip install -U pip -RUN . ${PYTHON_ENV}/bin/activate && pip install -Ur /tmp/requirements.txt - -# Libbeat specific +# Libbeat specific. 
RUN mkdir -p /etc/pki/tls/certs diff --git a/libbeat/Makefile b/libbeat/Makefile index 654e8ae06a7..0326f3e977d 100644 --- a/libbeat/Makefile +++ b/libbeat/Makefile @@ -1,9 +1,4 @@ -BEAT_NAME=libbeat -TEST_ENVIRONMENT?=true -SYSTEM_TESTS=true - -include scripts/Makefile - -# Collects all dependencies and then calls update -.PHONY: collect -collect: +# +# Includes +# +include ../dev-tools/make/oss.mk diff --git a/libbeat/docker-compose.yml b/libbeat/docker-compose.yml index a75a1f830b2..db7f8df6a09 100644 --- a/libbeat/docker-compose.yml +++ b/libbeat/docker-compose.yml @@ -19,7 +19,7 @@ services: - KIBANA_HOST=kibana - KIBANA_PORT=5601 env_file: - - ${PWD}/build/test.env + - ${PWD}/../testing/environments/test.env volumes: - ${PWD}/..:/go/src/github.com/elastic/beats/ # Used for docker integration tests: @@ -53,7 +53,7 @@ services: file: ${ES_BEATS}/testing/environments/${TESTING_ENVIRONMENT}.yml service: logstash env_file: - - ${PWD}/build/test.env + - ${PWD}/../testing/environments/test.env depends_on: elasticsearch: condition: service_healthy @@ -73,7 +73,7 @@ services: - REDIS_HOST=redis - REDIS_PORT=6379 env_file: - - ${PWD}/build/test.env + - ${PWD}/../testing/environments/test.env kafka: build: ${ES_BEATS}/testing/environments/docker/kafka diff --git a/libbeat/magefile.go b/libbeat/magefile.go index 61290e0835c..d2231290c48 100644 --- a/libbeat/magefile.go +++ b/libbeat/magefile.go @@ -20,36 +20,30 @@ package main import ( - "context" + "github.com/magefile/mage/mg" "github.com/elastic/beats/dev-tools/mage" -) - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML("processors") -} + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + libbeat "github.com/elastic/beats/libbeat/scripts/mage" +) -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) +func init() { + libbeat.SelectLogic = mage.OSSProject } -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(libbeat.Update.All) } diff --git a/libbeat/scripts/mage/fields.go b/libbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..c4dbfbc1624 --- /dev/null +++ b/libbeat/scripts/mage/fields.go @@ -0,0 +1,60 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (fieldsBuilder) FieldsGo() error { + // TODO: Currently libbeat does not have any fields.go files because each + // Beat is incorporating this into its include/fields.go file. + return nil +} + +func (fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject, mage.XPackProject: + modules = append(modules, mage.OSSBeatDir("processors")) + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (fieldsBuilder) FieldsAllYML() error { + // This isn't used because we don't generate any fields docs for libbeat. + return nil +} diff --git a/libbeat/scripts/mage/update.go b/libbeat/scripts/mage/update.go new file mode 100644 index 00000000000..73729723427 --- /dev/null +++ b/libbeat/scripts/mage/update.go @@ -0,0 +1,51 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/integtest" + "github.com/elastic/beats/dev-tools/mage/target/unittest" +) + +func init() { + unittest.RegisterGoTestDeps(Update.Fields) + unittest.RegisterPythonTestDeps(Update.Fields) + + integtest.RegisterPythonTestDeps(Update.Fields) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields) +} + +// Fields updates all fields files (.go, .yml). 
+func (Update) Fields() { + mg.Deps(fb.All) +} diff --git a/libbeat/tests/system/keystore.py b/libbeat/tests/system/keystore.py index 61c64a4ce58..777f72206c5 100644 --- a/libbeat/tests/system/keystore.py +++ b/libbeat/tests/system/keystore.py @@ -13,13 +13,13 @@ def add_secret(self, key, value="hello world\n", force=False): Add new secret using the --stdin option """ args = [self.test_binary, - "-systemTest", - "-test.coverprofile", - os.path.join(self.working_dir, "coverage.cov"), - "-c", os.path.join(self.working_dir, "mockbeat.yml"), - "-e", "-v", "-d", "*", - "keystore", "add", key, "--stdin", - ] + "-systemTest"] + if os.getenv("TEST_COVERAGE") == "true": + args += ["-test.coverprofile", + os.path.join(self.working_dir, "coverage.cov")] + args += ["-c", os.path.join(self.working_dir, "mockbeat.yml"), + "-e", "-v", "-d", "*", + "keystore", "add", key, "--stdin"] if force: args.append("--force") diff --git a/libbeat/tests/system/requirements.txt b/libbeat/tests/system/requirements.txt index 40159ad8aaa..c99bd92113e 100644 --- a/libbeat/tests/system/requirements.txt +++ b/libbeat/tests/system/requirements.txt @@ -15,6 +15,7 @@ idna==2.6 ipaddress==1.0.19 Jinja2==2.10 jsonschema==2.6.0 +kafka-python==1.4.3 MarkupSafe==1.0 nose==1.3.7 nose-timer==0.7.1 diff --git a/x-pack/libbeat/Dockerfile b/x-pack/libbeat/Dockerfile index db0dc92c7ed..3667155b10c 100644 --- a/x-pack/libbeat/Dockerfile +++ b/x-pack/libbeat/Dockerfile @@ -1,15 +1,17 @@ -FROM golang:1.10.3 +FROM golang:1.11.4 -RUN set -x && \ - apt-get update && \ - apt-get install -y --no-install-recommends \ - netcat python-pip rsync virtualenv && \ - apt-get clean +RUN \ + apt-get update \ + && apt-get install -y --no-install-recommends \ + netcat \ + python-pip \ + virtualenv \ + libpcap-dev \ + && rm -rf /var/lib/apt/lists/* +RUN pip install --upgrade pip RUN pip install --upgrade setuptools +RUN pip install --upgrade docker-compose==1.21.0 -# Setup work environment -ENV LIBBEAT_PATH /go/src/github.com/elastic/beats/x-pack/libbeat - -RUN mkdir -p $LIBBEAT_PATH/build/coverage -WORKDIR $LIBBEAT_PATH +# Libbeat specific. +RUN mkdir -p /etc/pki/tls/certs diff --git a/x-pack/libbeat/Makefile b/x-pack/libbeat/Makefile index cd48c57d0cd..7427a5c672b 100644 --- a/x-pack/libbeat/Makefile +++ b/x-pack/libbeat/Makefile @@ -1,11 +1,4 @@ -BEAT_NAME?=libbeat -BEAT_PATH?=github.com/elastic/beats/x-pack/${BEAT_NAME} -ES_BEATS?=../.. -LICENSE=Elastic -SYSTEM_TESTS?=true -TEST_ENVIRONMENT?=true - -# Path to the libbeat Makefile -include $(ES_BEATS)/libbeat/scripts/Makefile - -collect update fields: ; \ No newline at end of file +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/libbeat/docker-compose.yml b/x-pack/libbeat/docker-compose.yml index a09715b672a..a28d57f3131 100644 --- a/x-pack/libbeat/docker-compose.yml +++ b/x-pack/libbeat/docker-compose.yml @@ -5,7 +5,7 @@ services: depends_on: - proxy_dep env_file: - - ${PWD}/build/test.env + - ${PWD}/../../testing/environments/test.env working_dir: /go/src/github.com/elastic/beats/x-pack/libbeat volumes: - ${PWD}/../..:/go/src/github.com/elastic/beats/ diff --git a/x-pack/libbeat/magefile.go b/x-pack/libbeat/magefile.go index e2c438e8971..dc43340d79b 100644 --- a/x-pack/libbeat/magefile.go +++ b/x-pack/libbeat/magefile.go @@ -7,31 +7,27 @@ package main import ( - "context" + "github.com/magefile/mage/mg" "github.com/elastic/beats/dev-tools/mage" -) - -// Build builds the Beat binary. 
-func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // TODO: Import integtest. Skipped due to https://github.com/elastic/beats/issues/9597. + // mage:import + libbeat "github.com/elastic/beats/libbeat/scripts/mage" +) -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) +func init() { + libbeat.SelectLogic = mage.OSSProject } -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(libbeat.Update.All) } diff --git a/x-pack/libbeat/make.bat b/x-pack/libbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/x-pack/libbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. 
+ +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* From e6e3a114ac6fb158fac4a3b7f70ca8269be5e3f1 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:56:45 -0500 Subject: [PATCH 08/18] Refactor metricbeat build logic --- generator/common/Makefile | 9 +- generator/metricbeat/Makefile | 11 +- generator/metricbeat/{beat}/Makefile | 2 +- generator/metricbeat/{beat}/magefile.go | 98 ++---- metricbeat/Makefile | 91 ++---- metricbeat/cmd/root.go | 1 - metricbeat/docs/fields.asciidoc | 32 ++ metricbeat/docs/modules/system.asciidoc | 4 + metricbeat/include/fields.go | 4 +- metricbeat/include/list.go | 10 +- metricbeat/magefile.go | 190 ++---------- metricbeat/metricbeat.reference.yml | 72 ++--- .../module/system/_meta/config.reference.yml | 4 + metricbeat/module/system/_meta/config.yml | 4 + metricbeat/scripts/generate_imports_helper.py | 27 -- metricbeat/scripts/mage/config.go | 64 ++++ metricbeat/scripts/mage/fields.go | 100 +++++++ metricbeat/scripts/mage/generate.go | 65 ++++ metricbeat/scripts/mage/package.go | 113 +++++++ metricbeat/scripts/mage/update.go | 146 +++++++++ metricbeat/tests/system/test_base.py | 2 +- x-pack/metricbeat/Makefile | 7 +- x-pack/metricbeat/magefile.go | 279 ++---------------- x-pack/metricbeat/metricbeat.docker.yml | 11 + .../metricbeat/modules.d/mssql.yml.disabled | 3 + x-pack/metricbeat/packages.yml | 90 ------ 26 files changed, 691 insertions(+), 748 deletions(-) delete mode 100644 metricbeat/scripts/generate_imports_helper.py create mode 100644 metricbeat/scripts/mage/config.go create mode 100644 metricbeat/scripts/mage/fields.go create mode 100644 metricbeat/scripts/mage/generate.go create mode 100644 metricbeat/scripts/mage/package.go create mode 100644 metricbeat/scripts/mage/update.go create mode 100644 x-pack/metricbeat/metricbeat.docker.yml delete mode 100644 x-pack/metricbeat/packages.yml diff --git a/generator/common/Makefile b/generator/common/Makefile index 34694dd5712..ce23a86a418 100644 --- a/generator/common/Makefile +++ b/generator/common/Makefile @@ -5,6 +5,8 @@ BEAT_TYPE?=beat BEAT_PATH=${BUILD_DIR}/src/beatpath/testbeat ES_BEATS=${GOPATH}/src/github.com/elastic/beats PREPARE_COMMAND?= +BUILD_CMD?=$(MAKE) +TEST_CMD?=$(MAKE) unit # Runs test build for mock beat .PHONY: test @@ -14,15 +16,15 @@ test: prepare-test export PATH=$${GOPATH}/bin:${PATH}; \ cd ${BEAT_PATH} ; \ $(MAKE) copy-vendor || exit 1 ; \ - ${PREPARE_COMMAND} \ $(MAKE) git-init || exit 1 ; \ + ${PREPARE_COMMAND} \ $(MAKE) update || exit 1 ; \ git config user.email "beats-jenkins@test.com" || exit 1 ; \ git config user.name "beats-jenkins" || exit 1 ; \ $(MAKE) git-add || exit 1 ; \ $(MAKE) check CHECK_HEADERS_DISABLED=y || exit 1 ; \ - $(MAKE) || exit 1 ; \ - $(MAKE) unit + $(BUILD_CMD) || exit 1 ; \ + $(TEST_CMD) .PHONY: prepare-test prepare-test:: python-env @@ -30,6 +32,7 @@ prepare-test:: python-env mkdir -p ${BUILD_DIR}/src/github.com/elastic/beats/ rsync -a \ --include=vendor/github.com/magefile/mage/build \ + --include=dev-tools/mage/target/build \ --exclude=build/ \ --exclude=.git/ \ ${PWD}/../../* ${BUILD_DIR}/src/github.com/elastic/beats/ diff --git a/generator/metricbeat/Makefile b/generator/metricbeat/Makefile index 2441db7c351..cd91bed5420 100644 --- a/generator/metricbeat/Makefile +++ b/generator/metricbeat/Makefile @@ -1,13 +1,6 @@ BEAT_TYPE = metricbeat PREPARE_COMMAND = MODULE=elastic METRICSET=test make create-metricset; +BUILD_CMD = mage build +TEST_CMD = mage unitTest include 
../common/Makefile - -.PHONY: prepare-test -prepare-test:: python-env - mkdir -p ${BEAT_PATH}/scripts - rsync -a --exclude=build ${PWD}/../../metricbeat/scripts/generate_imports_helper.py ${BEAT_PATH}/scripts - -# Collects all dependencies and then calls update -.PHONY: collect -collect: diff --git a/generator/metricbeat/{beat}/Makefile b/generator/metricbeat/{beat}/Makefile index 478a3f7a4e1..5fdd379ef19 100644 --- a/generator/metricbeat/{beat}/Makefile +++ b/generator/metricbeat/{beat}/Makefile @@ -32,4 +32,4 @@ git-init: .PHONY: git-add git-add: git add -A - git commit -m "Add generated {beat} files" + git commit -q -m "Add generated {beat} files" diff --git a/generator/metricbeat/{beat}/magefile.go b/generator/metricbeat/{beat}/magefile.go index a8a5931f823..f409ec7aa44 100644 --- a/generator/metricbeat/{beat}/magefile.go +++ b/generator/metricbeat/{beat}/magefile.go @@ -20,92 +20,34 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + metricbeat "github.com/elastic/beats/metricbeat/scripts/mage" ) func init() { + mage.BeatProjectType = mage.CommunityProject + mage.SetBuildVariableSources(mage.DefaultBeatBuildVariableSources) mage.BeatDescription = "One sentence description of the Beat." } -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseCommunityBeatPackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update updates the generated files (aka make update). -func Update() error { - return sh.Run("make", "update") -} - -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML("module") -} - -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. 
-// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(metricbeat.Update.All) } diff --git a/metricbeat/Makefile b/metricbeat/Makefile index c78be44e0bf..6971d6ce7ce 100644 --- a/metricbeat/Makefile +++ b/metricbeat/Makefile @@ -1,62 +1,30 @@ -# Name can be overwritten, as Metricbeat is also a library -BEAT_NAME?=metricbeat -BEAT_TITLE?=Metricbeat -SYSTEM_TESTS?=true -TEST_ENVIRONMENT?=true -ES_BEATS?=.. - -# Metricbeat can only be cross-compiled on platforms not requiring CGO. -GOX_OS=netbsd linux windows -GOX_FLAGS=-arch="amd64 386 arm ppc64 ppc64le" - -DOCS_BRANCH=$(shell grep doc-branch ../libbeat/docs/version.asciidoc | cut -c 14-) - -include ${ES_BEATS}/libbeat/scripts/Makefile - -# Collects all module dashboards -.PHONY: kibana -kibana: - @rm -rf _meta/kibana.generated - @mkdir -p _meta/kibana.generated - @-cp -pr module/*/_meta/kibana/* _meta/kibana.generated - -# Collects all module docs -.PHONY: collect-docs -collect-docs: python-env - @rm -rf docs/modules - @mkdir -p docs/modules - @${PYTHON_ENV}/bin/python ${ES_BEATS}/metricbeat/scripts/docs_collector.py --beat ${BEAT_NAME} - -# Collects all module configs -.PHONY: configs -configs: python-env - @mkdir -p _meta - @cp ${ES_BEATS}/metricbeat/_meta/common.yml _meta/beat.yml - @cat ${ES_BEATS}/metricbeat/_meta/setup.yml >> _meta/beat.yml - @cat ${ES_BEATS}/metricbeat/_meta/common.reference.yml > _meta/beat.reference.yml - @${PYTHON_ENV}/bin/python ${ES_BEATS}/script/config_collector.py --beat ${BEAT_NAME} --full $(PWD) >> _meta/beat.reference.yml - @rm -rf modules.d - ${PYTHON_ENV}/bin/python ${ES_BEATS}/script/modules_collector.py --beat ${BEAT_NAME} --docs_branch=$(DOCS_BRANCH) - @chmod go-w modules.d/* - @# Enable system by default: - @if [ -f modules.d/system.yml.disabled ]; then mv modules.d/system.yml.disabled modules.d/system.yml; fi - -# Generates imports for all modules and metricsets -.PHONY: imports -imports: python-env - @mkdir -p include - @${PYTHON_ENV}/bin/python ${ES_BEATS}/script/generate_imports.py ${BEAT_PATH} - -# Runs all collection steps and updates afterwards -.PHONY: collect -collect: assets collect-docs configs kibana imports - -# Creates a new metricset. Requires the params MODULE and METRICSET +# +# Variables +# +ES_BEATS ?= .. +GOX_OS ?= netbsd linux windows +GOX_FLAGS ?= -arch="amd64 386 arm ppc64 ppc64le" + +# +# Includes +# +include $(ES_BEATS)/dev-tools/make/oss.mk + +# +# Targets +# + +# Creates a new metricset. Requires the params MODULE and METRICSET. .PHONY: create-metricset -create-metricset: python-env - @${PYTHON_ENV}/bin/python ${ES_BEATS}/metricbeat/scripts/create_metricset.py --path=$(PWD) --es_beats=$(ES_BEATS) --module=$(MODULE) --metricset=$(METRICSET) +create-metricset: mage + mage generateMetricSet -# Generates the data.json example documents +# +# TODO (andrewkroh on 12-26-2018): These targets need refactored to work in the +# Mage based build system. +# + +# Generates the data.json example documents. 
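+# It still relies on the legacy Docker Compose workflow noted in the TODO above.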
.PHONY: generate-json generate-json: build-image ${DOCKER_COMPOSE} run beat go test -tags=integration github.com/elastic/beats/metricbeat/module/... -data @@ -71,11 +39,4 @@ run-module: test-module: ## @testing Tests the given module. Needs $MODULE as param an run-module must be started first. test-module: python-env update metricbeat.test go test -tags=integration ${BEAT_PATH}/module/${MODULE}/... -v - . ${PYTHON_ENV}/bin/activate && INTEGRATION_TESTS=1 nosetests tests/system/test_${MODULE}.py - -.PHONY: assets -assets: - go run ${ES_BEATS}/metricbeat/scripts/assets/assets.go ${ES_BEATS}/metricbeat/module - mkdir -p include/fields - go run ${ES_BEATS}/dev-tools/cmd/asset/asset.go -license ${LICENSE} -pkg include -in ${ES_BEATS}/metricbeat/_meta/fields.common.yml -out include/fields.go $(BEAT_NAME) - go run ${ES_BEATS}/libbeat/scripts/cmd/global_fields/main.go -es_beats_path ${ES_BEATS} -beat_path ${PWD} | go run ${ES_BEATS}/dev-tools/cmd/asset/asset.go -license ${LICENSE} -out ./include/fields/fields.go -pkg include -priority asset.LibbeatFieldsPri ${ES_BEATS}/libbeat/fields.yml $(BEAT_NAME) + . ${PYTHON_VE_DIR}/bin/activate && INTEGRATION_TESTS=1 nosetests tests/system/test_${MODULE}.py diff --git a/metricbeat/cmd/root.go b/metricbeat/cmd/root.go index 13b7c4bf200..1fffb83d8b7 100644 --- a/metricbeat/cmd/root.go +++ b/metricbeat/cmd/root.go @@ -28,7 +28,6 @@ import ( // import modules _ "github.com/elastic/beats/metricbeat/include" - _ "github.com/elastic/beats/metricbeat/include/fields" ) // Name of this beat diff --git a/metricbeat/docs/fields.asciidoc b/metricbeat/docs/fields.asciidoc index 3f58984ee37..290dce18c7f 100644 --- a/metricbeat/docs/fields.asciidoc +++ b/metricbeat/docs/fields.asciidoc @@ -26,6 +26,7 @@ grouped in the following categories: * <> * <> * <> +* <> * <> * <> * <> @@ -8272,6 +8273,37 @@ type: integer -- type: integer +-- + +[[exported-fields-foo]] +== foo fields + +experimental[] +foo module + + + +[float] +== foo fields + + + + +[float] +== bar fields + +bar + + + +*`foo.bar.example`*:: ++ +-- +type: keyword + +Example field + + -- [[exported-fields-golang]] diff --git a/metricbeat/docs/modules/system.asciidoc b/metricbeat/docs/modules/system.asciidoc index 68238663519..247cf13729a 100644 --- a/metricbeat/docs/modules/system.asciidoc +++ b/metricbeat/docs/modules/system.asciidoc @@ -31,7 +31,9 @@ metricbeat.modules: - module: system metricsets: - cpu # CPU usage +{{- if ne .GOOS "windows" }} - load # CPU load averages +{{- end }} - memory # Memory usage - network # Network IO - process # Per process metrics @@ -43,7 +45,9 @@ metricbeat.modules: #- filesystem # File system usage for each mountpoint #- fsstat # File system summary metrics #- raid # Raid +{{- if eq .GOOS "linux" }} #- socket # Sockets and connection info (linux only) +{{- end }} enabled: true period: 10s processes: ['.*'] diff --git a/metricbeat/include/fields.go b/metricbeat/include/fields.go index b44d5061e0e..73a640c9297 100644 --- a/metricbeat/include/fields.go +++ b/metricbeat/include/fields.go @@ -24,12 +24,12 @@ import ( ) func init() { - if err := asset.SetFields("metricbeat", "../metricbeat/_meta/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("metricbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } // Asset returns asset data func Asset() string { - return 
"eJyUUktu2zAU3OsUg+yjA2hRoPAVcoEXchwS5Ucln6zq9gVpOqiN1kW04+ObD2f0ih88FpgcY04ToF4DF5xuZ8tqil/V57Tg2wQAp5xUfKoDhLNnsBVyER/kPRA+QUIAL0wKPVbWecJYW6bO8YokkQsitXhTqXPMdgvsl39Vbd+bY8chn6GOuGKgThQfTCyitP2ma88D2CwskOCljskq6paxdCcc/UeRq6yWjf8y285ftHrDPnF7J1ZZLt5wFmsLa32udsop0bQpxv6nrhjnE3EuOWJ33rgHO7u0IkOgUdoZb87XT9peGaIcSFnxTqyFtZW6O6bOY0XlngIhGwnheHhO62DwFv7cfKEdEV+H/CVxDX+G/J8nt4BtNlu8/WQzvoddjooecsaLzeZlnn4HAAD///yQ6RY=" + return "eJzsvftzHDdyOP67/goUXfW1nCyHD1EPM3XfhCfJFsuSzIhSnEsupcXOYHdhzgBjAMPVOrn//VPoBjDAPMjlY3W+KuqqztLsDNBoNBr97l1ywdbHhOX6ESGGm5Idk9cvzx8RUjCdK14bLsUx+f8fEULsD2TOWVno7BFxfzt+BD/tEkErdkx2/s3wimlDq3oHfiDErGt2TApqmHtQsktWHpNcKv9Esd8arlhxTIxq/EP2hVa1hWfncP/g2e7+093DJx/3XxzvPz1+cpS9ePrkv/wMA6DaP6+oYXsWHLJaMkHMkhF2yYQhUvEFF9SwInsU3v5BKlLKBb6iiVlyTbiGr4qxgVZUkwUTTNmxJoSKIgwnpMG3Ob6mGI1n++BWjFgkc6kILUs3eZbi1NCFHkUdYveCrVdSFT3M/fdfd2oliya3uPnrzoT8dYeJy8O/7vzPNbh7y7Uhcu4H1qTRrCBGWmAIo/kSQe1AWtIZK6+DVc5+Zbnpgvq/TFwekxbYCaF1XfKcImRzKXdnVP3taqh/Yuu9S1o2jNSUKx3h+yUVZMbCKmhRkIoZSriYS1XBJPa5wz85X8qmLGATcykM5YIIpg1r9xdXoTNyUpYE5tSEKka0kXZbqfaoi4B47Rc7LWR+wdTUUgyZXrzQU4e6Dj4rpjVdjJ8bRKhhX3ro3HnDylKSX6Qqi2u2ukf4zM/riNNhAH+yb7qfo5WdCiLNkimLYJJTzQbHSfcglyKnhomWMRBS8PmcKXu0HEpXS54vAbHGHqa5YqxcE82oypd0VrKMnM5J1ZSG12U7jJtXE/aFazOx36799LmsZlywgnBhJJGCdZbjcU8XTHi0OsZ4Ej1aKNnUx+Twatx+XDIcyHHLQE2OrVBCZ7Ix8E8t52ZlV8qE4WY9IXxOqFhb6Kklw7K0BDchBTP4F6mInGmmLu1CcfOkIJQspV2zVMTQC6ZJxahuFKvSFzJPjZpwkZdNwcifGQWCXsCbFV0TWmpJVCPsZ24qpTO4B2BV2T/5demlZV8zRmpZN6Vlh2TFzdICS3mpLSsxAReqEYKLhR3VPrTgRItRlm/ihjs2u6R1zeyW2TUBWYUVAW+16xSZQ/pcSiOkYfE2+KUeW0K1I1gStTDBkoH7lnKhJy2MmSUCy//nvGQzRk0G5+Tk7N3EcnS8GML46bLc9tK63rML4jnLIkKIOU4hmUYms6RiwQiftyfBEgfXRNtvzFLJZrEkvzWssTPotTas0qTkF4z8ROcXdEI+sIIjUdRK5kzr6MUwqm7sadLkrVxoQ/WS4JrIOSA+S9gKULhHqrvr7d/DYP6kWKLgUoTnQ5yKjFxVV5wd++c/cOiEfLIUiojpPcv2s/1dlR8Ow2n/fxtAvrekciWElhGgOEEBCnekkSEt+CWDy4cK9zm+7X5esrKeN2VMG0jmyi+cmJUkPzg6JVxoQ0XurqPOUdN2cnvekrFmjbFcoamoADnFMlaiWU0VkinXRDBW2AMoHEfuTZcM6Ik3l5WdfK5kNYCT0zkRkviDBmjAE+gfyblhgpRsbgirarPOhjZ9LuXwdtud3MZ2f1zXG2y3P+52AqINXWtCy5X9T9gHe/lrFDQCGczWEZ+0N2WWokwE1hV2oH1/BWO5aWasfQX4OJ9bQkmGGyeahGAqmi+5YMPod0MM7wEvtrEDnwT/rWGEF/amnHOmcDvs8QI8POZzuNjh9tffDexPkMQsU8dLAL5f+d0Als+LwSW/oEfzp/v7xfCSWb1kFVO0/Dy0ePbFMFGw4m4IeO3nuAsOkCVZIVdVtCzX7hLShOZKaquxaEOVFTQsf5giqfNiGm6tq5Azf9RO6DGTl7wnUr2Mn20mU524gSyHKNgcZDmKx4oLbjg1EpBBiWBmJdWFFboEA60C2SbKSootqCrglrS3pRR6Er2JV+mMF1zhA1qSeSlXRLHcKkQoD3x8eeaGQ87VQtYDxz6wr0fAwC2gmSjw9fO/vCc1zS+Yeay/w/FRqK6VNDKXZW8S1D3t3nWmU6BSM6uMeHHEI8MoKjQFADJyLisWpAkru9s3DVMV2fFKslQ79nJSbM5UMr3oLEejlON+dnIh7uGMBUEwkndhWmJBEQu/g+3gMcyoazpi8UNbTtXoBpbfSp1cWJB+bQSiGIRQJ1Y60wUZGKdFpJXG2tEsueCW7MIBDgp6cprceHt+IsVqxazgBtcn3uRW49SsosLwHLQA9sW4S599wZM3cXcr1+HSN5JccrtG/jtrdQa7RqZAj9DcNNRh/3RO1rJRYfQ5LUuNqARpw7CFVOuJfcnfO9rwsiRMWHHakaNsVI53U8G0sRRg8WiRNOdlac9aXStZK04NK9e3FBlpUSim9bb4I5A16g6OoNyE7oILbKOa8UUjG12ukXidWYeXZTKelhUDuxYpuTZ2z07PJoSSQlZ2E6QilDSCfyHa6vUmI+QvLY7xPk7HM9IpOIquPGye6KeZezBFHA6LF2BYaqWHokFjCarW04zXUwvWNEMQp1ZtrJkonCwIhJYMae8KUGyykZu83vAmT168Yo9Oz8LCHXfErRpYrjPeWBClCto+OT27PLIPTs8un7UbPAJ/LZXZcAWlFIvN1nAmlbkS+mDIofk2BKF3Jy83QqIHA4lhG5A4FogTdGb/hrxjRvFc9+CZrQ0bYAKb7EoQOA5eHG0G4p/tZKhPW4Ukvm6MxBsp0oL7BATXwJ2hPdyQsnC2jcDtgbpgsZjvJK0fk4cdUesaaH5kMhiwqFVBlFrH5itKdM1yPuc5KSWabIlipWdH9o67bMU8/COVhTM1hzDFL+2ta9cLTDbmgDF644uGkI4vIkWGByiZfHjrwuhMfq4l7wB8BX4IeSvFgpumwJuzpAb+kSpvgQi+/V+yU0qxc0x2nz/Jnh0cvXiyPyE7JTU7x+ToafZ0/+n3By/I374dWo+93blgwnzu2DOuW1X/PF+zptiuEWYdWdJ7qcySnFRM8ZwOg90Io9ZbB/olzgOzjsD6kgpaDAKp2IJLsXUYP8A0V4H47w2bsXwQj9x8BSRycyUG30lhFKPlVRvNtfycy+KrbPbp+c/EzjW24SdXbPbXgNNt+LVg7v77yyFIx7Z7QFi+NYifNFO7Xi6O3kRN2jPRCXEGJ9
SG5JwsFBVNSZWlGOdmUQyvhexRf7tQWg1GPuQuXOFlkjNhmHJa7ryUUhHRVDOmwBcCxg2vT+rO0AhiSerlWnP7F+9EyT0p6x447yWY5+zr5RrdUlwQ2hhZwc21YNKve2THZlIbKXaL/FHH0CGbomvnaB9tZub4Ae/b6BpFCUA24AfhYq6oNqrJTRM7S1rE2H3oGWCv9Y/MnbCGZkEdG5CpIK9fHqK7xt5yc2byJdO4d3Bn82h69EK1MNuLPnUlJv4vroOZMQUiDKga4fxXilXSBLMkkY3RvGDRXMPQUeLcMfGQsccGPnbUl3o+cdh2KPBCueljR5CbIEXcZjpyTEC1kpe8YGoj/ThQI8sP7ybEJxc+rNgDEryFsaub5YcTssjZhEiVMhq+4IaWMme0qwsEA8Al5SWd8dJeZ79LMWCpv2qpjd5lVJvdg/xuKz6JwCC/gw7sPRxAkkDr7WaOLAZvko1WMAZjf2WbLcDdLLeB2tv8szvaqQPofPfg8MnR02fPX3y/T2d5web7my3i1EFCTl958oMlJH6HcfiH/Xr3Y0kKoEXX1SbA+V+HnVC3wa45zCpW8KbaDPB3njtF3qoN4KY5yG/3RhPPnj17/vz5ixcvvv/++80A/9hycYQFQgPUggr+u3NHFiGGxLk/1m3gSHpRWyGAQ4gDoWg42jVMUGEIE5dcSVENW5zaC/Hkl/MACC8m5EcpFyXD+5z8/OFHclpgJAaGv4BnKhmq9dBE88TKHOUicHovLXQebyYxhK9SC7kzY/fCnSJLvFfeu+AQtAk7d4YzDct5PAzYTTXzUy5ZWVuxGcUWvDFnVEdEE+bQXs9fW0ZleKtt3NCY7L7eFgv4gMOTigq6sDc68NiwjEEvGMZ3jfCtbfpEA1iEdw3HYf6KLrbLNGM5AmYLJgQEbUU1mTW8NEE4GgHS0MW2YGwPi4OQjt2T28RUC0WrbfcASKIqNwEhibAkIVjx823uP0COD04kXf4VuYhSDvaq98NmPCz6bgMXYuyhAj0VjbR7Ljb1ikFv4DxErtfGPZM/srsr8dk9+Lz+8D6vaL/+UR1f40v4+t6v62HZngss5jL/aH6wmG147xLwvT+wM+wKmHvwPnjEHjxi/VU9eMQePGKbIvHBI/bgEXvwiN3WI8aC0JPkmJKN9cJ3zNDd+GYM16uRdrC/U+rKYOLqNZT1+uW5nxd30AUqSlidJkZmZMpynbmXppg3otKMUXupVo02GOAN21SOhKfaP79Y7em3hqk1BNtihHdQKLgoeM402d11boSKrj1AFsG65IulKdfp4Qm5etGKYAxYFYJZWrmNC8MWygXD0uJXCzZKbKmGmC9ZRQNu3D07uiQwFDcKswXdN1yTA0gCmjFDD8mgbS56oR00EKpSsmOMfR092jjrr7WI5pBU4wKCcXxQV6hYkwsuiswyGrvSCoPT8QWzjDyfmP9mt6Zk6Ne0m+hT/iDCG3Muu4lz3GhWzls3phU77fgJNjd3S36tbI65y/PrwzqWGnsdQFGK7DXQwG63KaGDc3cux3vDBM5tR/dcHc3NfUwEcr3sZVS8vrxNkirSy5DfwEeTD7sOSrkg6FxQPE+oLiMn8GuapeEVH0+TdoFRjigYnZa4atomfmbkbZugDFzP56xCvgKvmL2FvQfUPrVDtF+HVFc5j1Od/SDUp0wSyHjx4Q4uhKHNI0Gtl8wYJo14ZZR6G6FV7GK1dIJWsoE0lBkzK8bsHD4+XRQuPoEpN4FL58C017yU2q7kxKP6erR6q5FUzAoNoIeUMBZmAsA/k+RgC8QwQoczbhO8xiTQorZilVRrYtkf5Bi4gYpOpvJlUwqm0BHP25xl95rOqbALhbzl2130W2Vdp6/s1gc7deC/t8geszdCH9L7MRPbc27HT27WscSwBb8Ev2n30K/sufRO5aR6gh8xGctfPRMwptsB3OmJxDevTeN1FsPWOmKTQS1/msIb0wmZakMNs3+hJVXVNCO/UGUPACR7zxsIjwrSiZxbaWVCVqnoUZcUjEgu3sUKz64ABs1zVhvIiHWhL3g7eQlnQuqSUQ0MMxkSnAc5bbrCciAEgHvkgnG5Olu5ZJBPuBnGtj+IDEu+WLrcp+EbYGTnTlM64BoZESRa2W1fUuH2MMNktOnEOwM0E9plI7XKCE3JyoHfwhlkWeqT0TYgg3TD2D2QQTJio9kAGQzRQmN1TXAwA48dpgpc2TZoAtKV8WbKaW2A87pM5CuZRNA9Xf5hSx9cpMQQCKA9+EuaWiAdNfitnUbXCxx44PW7tCjsWXcX9i5c2KyYpls5nfOS7eaK2etzim4urAvDdZvv6u9Pt1Ju56pA4R48r7BHNdXa4nUXU/aGN0o2Jpfbcxrb1bgprmPlp9HP0W5R4bZ7EpGwTqMz2xlSY4o9lj59tL3/8WW3U7rJc/DlQXmbOeVlo1jKmJMxx5n0TU5kOuQok97wRLo1DG/wtkoLfGAgAaLg7bDSDCgi9s8ZroheSoiHCoEpbUEpS7BgRhpToWTRlFuviIGzOFvVRnUhMDE9ZibJF9GoOtioMIdfqlDZZPAIV2v9WzmMDAuaZpt6Sm+NDTfNmDlDCkvUaGGcunen5LFlZ5oZsuekbM3MdxYr6eqtHpAaVJqZ/coK54gu4MTJKY/RHLKPnVWlY+9xla24aIHAKjlgigqP3H5bAkaos67ZPJGARk6YZpdMcbOpBDTmYdx5vrPZHp27+TpXmgejI9z8snRG3+Gww/CVExUqBi5CYTlcFKoYtMBQNMvuz7eaNDUxssN1k/vJcsSKXjACOpWbjjv2m0uhuTagVaKdb9CEFi4rzPMvb03535BPlohMIyAj3Nk0Xbg4x/pGeilXAuMCc1OuyZoZS67/RwqJlfKkukiGtPKD5e2arFgSmPINOdXk//vm4PDoX3xcYppub7fq/6DqnlQXFhA4UWDJaG1kyYAYTMrzCz1IpTvnrCYH35P9F8eHz44P9jGM9uXrH473EY5zljd2u/Ffyb7ZnbNSCIp2Ct84yNyHB/v7g9+spKr8BTRvrKiijaxrVvjP8L9a5X862M/s/w46IxTa/OkwO8gOs0Ndmz8dHD453PAgEPKBrsBeFqq3yTn4DlQg/08u+rZglRTaKGrQEIR2Xm6GtArH1vF2clTBRcG+MLRlFzL/HOUWFFzb7S+QY1FhX5+xzohYBo4VWKGEh4pKyjIjFvzm089on5nG2wtzH5M5LROhvQUj/q13aJZUL+8k3rXU1cbMD/3t5M8vX228c2+oXpLHNVNLWmuoaAY1vuZcLJiqFRfmO7uZiq7cPhhp0QUyVIfhkI03N1ygjepGFdxPrNErN3DCgy2DEFRIzXIpiiH3wOnckSuoCEBj+G8mCiCxC2F5EnAr1A3ayLKuZ8Kz7JwFng2QCKRdnKGNYO7Li7xiGye53EojCEerXURUiS+pWvqtJqFGa1uBzhns0lvHgZ1q/qVitFiTxyxbZFaHok1pyPlaWyIJA+vv8C5LxpO1K6QDwfIrrofk2pNWrg/z4+zAGY4JtcdcCjBfnr5ycOy8bpSs2d5Jp
Q1TBa12vktVQjqbKXaJ9lT/yfnHne/ARCvImzfHVdVezZyW/q3d/afH+/s73QpKwVSDSuaGVF/ExS6v3FKnDOPovby5wUq07uUxibrddCuJc224yJ0F+9+i31y5mOiRn7wnkTglHG5P93Lmy4kCqBpr07VU4Tn0sNzkagB1gEH2U3KBkmZn4RxL68b18JIxZ+uoDJpiSOvgasppmZFpu84pehbiCp3ht3RrvhhFc+OvlxjCSWffArBhCdyXAk73x1VayzF6tq6tHCXB4WBvYDTKWAUIPXwDm9PjWe0rA/DGHg07Qcsdu5D3ifIaWvMl6gB/6eZb/AfcT+JVtFyrrXnX1wksm70BC73pYUM2fu1RcyYnyzgGkURzwy+t9G/xNOdKG1/ZdGxh7EY2/5suy95S1y4KpoqXFJaRjGiXVNLrV6S4vvisOyzwKsY4LyXd0EP7gesLAmNjsVMuexqa493aCeZEyxLMPb4Onv/zSTMsmYW1yL7VQRtyIoE9bdcu8bOQqrrBBt5gre/BVsl/ZwXMd82yJ8FdVoLUvm95yMH+/hX1SCvKBYb6YI1RKA5m9dEKo/WpAD+iq9WGxj+t+aJzG7TAaSiDDsOsKNaq0YwR6syusBTErVNOaVn6CnQDDu45D/y848x27u4f2hfG8HgCo3Q9psSZRlIfFjidNZlZEc+zQufItc8h2Ma7JcG+AZBnAIavCe4vOaq1zHlbCxn0Rl8tMClth0jbczYT70MFIp4Qs5SaucroaK2GyU69PE7eScGNhOvhv384ffc/voo62MNcRjoUFITwETT1entqP6eGzucMLwv7encNJiqi74w+N/LItgHkplWgxg7MsCScbPMZtUBJl7Nfpoe1LaCvFsx8vq85P8JwsAQQO/S6Krm40INzwwRJjNkdZo6ZA+xmGL13xOGAh2ycUq4Io3ptcWQYkMps7YjNDxFZP4J2WjslrYvQ2P59h/XAGsCZDCbOCSm4grPmUPrdIEoLlhRxuMP8r2CkkSTXK0mKizgG6A4gnNqBWhOWD/hBjiXC3x2fGQKliWIb7om2rDwK3gOrX306ffUdchJ3m0aRWo/P4ccWWUSuRKeEWjA0ruLE4rtSDYz2LZjAVS93MqR93A9qzhSvqFojbwOc/NhZ9vDsSUrGvc0fVyIYnbu6PXmGw7//7Gh/GKB3lmbjXeeCyNzQsmOLHQRN8983BS0xEvVpwI5kp4b0KctCnG1RWpGGFoVXY6Z2tCnhqcwCTuLpMIupkoTyq4FM5PEEyLdWUoZgKkCSi5QAIbqShT1BxeDs+TZmr5ihGFMOnutiQNiKCdbnSEWPNo8mREKNogkr5mTBNhIW3tFOpFSWBZbskopeZHASSXUPUV/3Y3EbD1rFtfvy6cC29+qSGitl/h0yzGPnI4A2sO9RQwC37W/aJ5sW5fZFZxIZ29VVJrms6sZgVKOr2gJR4xDRFzURGbBdxl1EWikVe4aIKEQxbRWCNTnE9SGMdqWA1zZmcUlVsaKKTcglV6ahpa+ZoifkFRR2iIpYoLrzUzNjSjADxtSC3TZP3K5qmBju7oV+48aOi8EMmW9MVBDeWw1W3t859RBO7ZZWdumKmUZhZa4Na8xsa4XvN1odpGs6Gx+sK1pTtJZPkNqOeqlLv2nKjkf8t4aWwMV9UrwdxQf9WmBcsFMbY2SlFQxH0vZsd8pmsZwXoecRKslG2m/G8tO3GdSK53nIwneiA6F6T57rOYHlbyZgQHDOvMDf7RXAxWLepGUGuEALzEb1eI6TpI/Geyen0K0BtjDrI+m+k/iBY/Dap55/3Zz3N+54XTP7tnufjByvH6RylZF84TjXV8NZRJKyeXYoaGA0DaWtpql57nROLquJr7cTZcoF9juJ7f5RHabIqJOM2BLhBoQX4i5VvuSGQaHFWyO1dfh+efHs87OjDZ26P9dMUdO2ckqAGUh0l7GM6y7zdoxzGCN642ZJ7/bw/XzebWU2HBYsO4DHO6tYA97942R0I+vPDqddr7xFXw1WqfST3dAzrPO41+JoF1jv57ipG7lN7ryX5JLBt5B82tt3PzF5DD28ciaM1BPSzBphmglZcVHIVde+3dajomrFxRYzaVvyfkdzSyT/uXOHxaJCPwDtnFa8cwnfFd6CzTgVN4H23IHhtgKaexZLaiYEx5pAm8KZLuJtGVhMP/n07qs52M8ODrNnuyo/vMsG+HxKEOIVXRFtFFSSHFjGhZV8y3tdxVF2lO3vHhwc7rp8gbusBeHbYEkPxUIGdvehWMhDsZAU1odiIQ/FQh6KhXRAfCgWcn/FQpbGdKzQbz5+PHNPbls83w4RYlpuW2gWe+plFTNLuTXT8htjaj8VwalG0kXQ4YGGIohdm7E4zMJIUsoVUxCOZfVkX/8jI+csPRE7b8OLL2nNjR0Bdm7HOyF3Tn36gRWtXr883yFEYzb7YNT8gpkJqSG/u25GEho9PmeyWGfOO7ItrH50FjygroBemHkIfGyfvpKqHEnU9rBDX0S1Yan+W6WE4fhtRhtQsp9+CHa7Qn28tzcr5SJzT7NcVntjK9G1FJpl2lDT6C43v241mwdyO8LG2QjO1mPoYRVH+0fXwPv3IBsH/O3pZrTi0D0yDzfHSPWbgxSw8aqU4XgOV6e8B4r4KA0tO25cJzH7E/rYohq0giWjBVOpiaNd1tGT5xswme0t5fyqRYySy4sXo1B7Iv/7IN/R+T1gPz6sXx391x3XBP+tyrtIxY+34cHV4gY6bmiS5S6jgjO3FDsAS32s3d2a/1YuWknUR6mPpZJjkekkI/+Xkw/vpxMyff3hg/3P6fsffp4Oovn1hw/DS7tz8uF4lh4ItODEere2C4tNSDdK/hpFY+eiwIBasH37IGKLT59FR7th2HCtRG8kw83YHKsllNyg39yQBhIiQqGLmqrBumin6N9UNFRZI1M3hauu7Qg19oRCG2KfJlCncfYkJg83Ulw4oFM3wC1+0ltgx7mDrtglvWQhp0hbGsPQmNyXi6vrkrMCPUVM5BLLeSsi2CpV6rhgGlpDXaLsm5eMCsilTUEfi4a+aWoi0dLlHH7by020kja4fZ03BGX0jdITE1bkooRTdvQ+ebh5VI4POe73Tc9lVTXC4RwDW+UlU56huWgLlQYtu1gL1/bb/XSrYA4/bMic6EYdewvoLRno1uNrFvyS2bvHeb2ggJ/06pFu1XSPpCEG9iNICr/wOR9exLZcuqeo3/18fgphfSUe7FVsa3AER97SNVMZ4fXl0cT+/zP7/5rlE1LzakKYyf+QeupVaqpdyzC+ORX0M9pPtkU7hJyevD8hZ669P3kPs5HHXoFbrVaZBSOTarGHaRdQqG2vdl/sInz9B9mXpanKjjeQkHNDRUFVAWj3hVT8t3CQuSa05AuBefd4+t4z80MpV5YXdsbT8NxbWSDrD1lG4xLAhtY3uA/PRoheUaFv0MHgZm0zoHiFDqcy2nGXUS60YbStrsLITzh+bH1LhgzwktKeFfK4KeoJMXmN52WX51UNByX77g95VK48Kyavh3cJ7uiem+hej8oJohwZ
LfrEolkd5fq8GzXjRlHFy7VLVsKKOulOLblYaBQrKp4r6RNlcOtpqWWbhxm/rC/WNZsQnv+WJhjPac5mUl5MiFlxYzDOK+ak3kKquWmccNPWa71kouhA2CbvhKxZlsvCCh7O7RzSOVGA2CvsDXJ6hrHxOgXPEqWGCJkVVz6j+o9pV7yKBimvhmnQc7Gt6EnPwxXop0H3DmFfMrAMTUgJfONXmlsCCFzAv/6Ph+hghO9huuCKba0S3Ss/uNc5vGxoFJ3PfbJZ8skHZsVXTGBtxfTjzlX1T4SLmWx6V9g/EdmY4R+4MEylyin+YFna4A+NgKISfRih/HZF6zoq3Oxqx1rZehda5JGqTeRzVXcnQXgGsSxlOFjoy/MAO863moDj3SLvkrPVWCHwYUg8qqUiNVO8Yoapccg63CWCsgtZApL9L8TdhRR0P9WwfBZtWo8S51KtqCpY8Xk7QZ5Ru6aQFu3yw6KfnNJfK/ll2Mh08P1hdpAdZIfDq3DKl1l/3l66wglUrMEKywA/6LVRA53TMyz/664J6uQ/GtbWZa6k9fil6mMWTCGUGCnLXboQUhueE+2kz7hxZ0rRpVwNWTTeMqoEZiRTE9wbC26WzQwcG3aroUT9XkDmLi92dc3ywR359uB4+fM/6/dHb/753Y9P3/1l78XyVP3n2W/50X/9++/7f/o2BWErfZuuNcyiJROuEvAAAa5n0irQnkeOlL2ZujZIMIIrwhg3xvLPfQ2cCZl6Edj9hCTNFdFNNYjAJ89ejFzDd2kMdS1O3Oh3woobYwAv7S8DmAk/Xoubw6O+HacTpuoDc9OnG2baiDBaP6W9Zjmnpeetk5CziUkJrcDscmhDH92CGZabiR8ZXsf09+vH2vX6n7tNonKAXi73IjAleaONrEKKDY4DDZYha8Ktq5OHL8WcL6AorZFENeIG69RybuxEUa1Sn+Yz54qtaFnqib3pVaMRLwapaK9WsB4YxKeB+Dsrug41E1oqPSErNktmjoaH6IxSak2GBrX4Ojl759buzGl+i2N7Gi3LK8xpTl7CYSHig4r1BFGJq9Jhf7UvN4B7rNvL/wpUdtP+yTtn2f6tYQ0OSV5/fAu5XlIAKfgrwhUKSrtWOBoJVXmgbmHBoOq7Wz30h3z98jy7RbOKr9d0sBeD/hX7RwY66U3+NXPJxqHo6bX3BkNggjhF0pN6AIy79fm5KkOjhaPjdW9rmSpOyy3bEgMYOJuL/OoDs7XMoGXaaz5sj696u0ndX6ZcRplllP5m83bKdsR1zXTWd0gmg029cqCmEzL1zNj+nRca/lNrV0j8yxr+IssSX0aWbv/WsuVhv6Yf9iEP5yEP5yEP5yEP5yEP54q1POTh3IXhPeThPOThpLA+5OE85OE85OF0QHzIw7m/PBypFlQ4N6L70Gsy/V82D0OLh/XXMROK50tEH1i1xnqNVTUVa3vpImLCwLGW2Ykey9J+rEtW1lCelCpFxcJ3KjGuV07U5oQKDAOEwC7XTNEFX4Z548XcNr53m+Fp8U6RXp28v2+lrBh3WUp5nW7RI5rz5jR3V225rymPaslDGvKgftzTjgd04xtS0oBWfL/UdA/acFcXHlzInY/E1XrwTZZ4xaHpacF3gbOv/14F5a1038FF3EdC0rV6700QPqogDoLf03rvAv2V+u5N1nCdrku6DkLnIUnZ3lny8Da9x0eZXWh5nI18SUV7U0LfJgjv8D6bpG0YRGiHFsq82EtOrwsuiQPwkSf7Ho5ZzYspkXPDBNGGrrWPWPKdjrGJuVVIowiYXNYc1XKobFjKGS2j3nce5EjouSkv3bi62uZe7LOAo5QjunZorqfQVxUQPEgDbI64rB9o00CseMmgsNdC0crJvYpoXvGSDgfvjC6oHkTuPaSB+dXUFCrE9crXtSW9FjfJQ7sVRqlaNNVA4zX75x1dWwUC5U4k41pJw3IDDmVu+CUb9mhF6P3vHa2XOxOys1va/7fCg/2vbwn2bOd/hhfPvrC8gQ4720LByQw6LjBMJXFn1DOIdvrBVe01Wu3NuNgbpR7gjtvePZhkJGzTrgR+n2DGEh4Q45u4UB3WilGiL6nAgOK4803qQYnK2BFKZkquNPjyfPKXA8jjcsVmpIbOML5VoxVdxWg/DuhCV2R3OXVtYvbh0cZ+KmjNc/pqOw1d2nv7cP/g2e7+093DJx/3XxzvPz1+cpS9ePrkvza8vj/6nvcxmbo2LyOgr6S64GLxGaOOBlt130YC2VvKiu3RMq5vfy3oDhYSYPHWzuSKT8QNZ9VOxY0PycNNxY228xjDLs++1POc5rzkxooNNb+UQMhUyUYUVlrgDKvst/1piU8yhd90tzeHi4HXjEF36YqKtVU/ctYGiXyMJw1jYpdA8Duj4llNCGSuhXBhPFTcSQ26lgKSDF1CYCsaTx3assgbfAJNWxUzLO552QZqMD2J0i1njDSiYApUvxCOoyYuLHMSx2ROSF5y6OriX7IikI9Hi2NfM3KKjVvcsmhZQkCnkS3IvJ5OUJijIF0JhxdACnWJFadnxCh+yWlZridESFJRYyAPEDzzBiagCjourkM0ejzJMc1mWZ4V09tW7B4ImRk9SJuGzZyUIcPZogVISPryn5105yhooxevd36LaD330UDSpaM0qFYaRV/nUggXAg+XAsZLKbagqsCAMw3dOibRm5jYMeMhBtLKwpialUtVaOzK9vHlWWg3g81tPWQITs64/bfDFBcc2uCd/+W9i7t8rEPPAztUOz0Oj5VXQzZZdw5XCrxc9xffifMX2vcXB3bgAuUIzU3jTZzYXYypiuyEkXawvvzcxZz4mUUHWO3rL8PPTt3x9tiB5FRfdzVHBqY7g8ewu/ao58nQFHp4I+Rt6B6HsMZfG5G3OhQed/fd0DAtCoU00WCWTnCLdtGgPdjw9yUOv+eBT1s1oMpHC8vHKyoMz32kv3d9fsHGAZO2T7RVEOdNaV+45HaJ/HcWWWIFyZkC/bNNefKsSoXR57QsdWg76Lv/I69yOcTa8LIkTEC3Y3htJIrdImnOQU+hda1krTj0JL4lM3IsfFuiJgYwYU853JJwZ2CiuecX1YwvGtnoco2069rw8TL19uugq0HIFHieJ4T64uTA5xsoay4trWSE/KXFMVbwTscz0mWnKbpq0x2Q5qeZezCNndtd2UTYS6PNBC8aDCdFjWdqLyUL1jRDEKf2/rM3GKT4u+L9yZDQjNSKGWNm7O1HXMaRjsmrL/F+73gKyOnZ5ZF9cHp2+azd4BH4b5DqegOlWCpzJfRfP2T2SjCQGLYBiWOpOEFn9q1kebQ5QC+ONgPxz5D2AR1S2vROF/eIuh9eE2MEdJf8ixbaDRW8M5ePsQm4PVAfwnsewnv6q3oI73kI79kUiQ/hPQ/hPQ/hPbcN73HFJfomjvbh5gEWvlJFV5828W9SQbCNvTfbvlwY80Njz15ZQgTFWODOnIvClVPzfkkoPYOWLH/Hh/H89PaLTo7OPbSTu7d+S1GAjC9f2AiBFh9YwFjdMl54DQvbL5WhQ+caqdF/j69X9IJpq0TVUms
+67TLN7KL1SidE3dQROUNx0ELHZu8aVIxCI1RnIkcfBpaN0yj5cOOqVhhF+Paw4H+nwxoRToXp+U7NfPCt5cOuYSiaGkBLQVcLKBBpWs714W0DUd58pw9ZbM526fsWX70/fPDYsa+n+8fPD+iB8+ePJ/NXhwePZ+PFCq6U6Zd68hgJdWG52ia3XWr2tCLEQtCnubbxCt3pq7IvYp5XRgAsrFcOzjoCAuG4lApqpQrDVxvJZPhPLpbhQ/aofmTqFri9o0S7e+uNVRKkMitReI7w+A+11Nt6olQtA3AkiFOSqzU58C1pFFwbRSfNXYYX/gH6UU1YBsO6vtSaqOJSZfXHhG0ZXqbnl80Fs1wSxvxrLu6a1CyRc7J63jn4y2AZbkUah/PgXpVo00n4QrdjT9IRf7MqNH9Ybi2WCvYnDalgcoNdfAWBTxCt9RkXOcJmRMhiR8n9LbbRguykRNxE39elIt4q9MAA3ifjUuTx96eA1dPwiTt/SY7ZOxBsKNewy1hwE5+dApxSiyTzs6FilPJDNMEkd1jEnlkzVbSQ1+6nn0wQWdfbhqYdmMaepIdZps2XPsPF7LVIZ1YUtmEflruCEWc5IUVSamLMGYGWxSnAkuIFrOy7BDxjOCJ1UtWMUXLLdaPee3n6IkprXxBHvM53OTsC9emF29IInml7TAKLgVNaK6k1kQx8Lq7GmyBrHkxJYWE3qrDFe9f0KP50/39eTtjIGhwFHRk3PjZZiIufrKJtyi0j6fOFreXVC7tDrW5dyj2czgX0e2k2K/o1XBemn9kr0b3XtiiR6Ovb3wFbwYWxekf1X8Mb8YQ9H8Hb8ZVYGzRm4HH6x/Om4FgO/dAXIBphIr+CC6NcZh78D74NR78Gv1VPfg1HvwamyLxwa/x4Nd48GvcxK+R6HyNKlOF79OHt1erd58+vPU3rGtcj1VN65IZZn+doA6mc6sGT1z0LtRLpWZ5Sz1svPfNfSXeYicVVrQNaRoFNV19ELVZpqragB7wXhoXc8fFQP3DSVzsqwBEVpjbQrH/i0VeMiDEElPQuGgOkfalXDiqs59z7XLBfm20aYMUfYnLFuEdzSzu4BJi0MPnYXgKvo8V1QHoSdjproQ0Zm5I8Rx3a3BGtiyXx0dHT/bQ2Pavv/0pMb59Y2Rthx/5eZhaLDK3RSmn87BXqKPzyqpuDocQrdloNFVPkM20CnBIl09GnDaqzOyY04ndcIgMNskWKZZLoY1qwI4mFfEbhWSZnvgeiXY25FZbMIxnPOLbwvQ5jN5pDzcJBf13YCE7I8fwGNMmj6e+SVFNI1UYRh7Hzs2U0/tZ7StnohlbbbpdQ8s+FZhhZUnPnn7PX1yYt3R6iqtmCiX3MQa+XCPLBv0ovYcRKHSVgBMGOkc40k5qfgONL2ToouVsOn21KKA6XdGIPjtoFRlPchCGLRI/z4bGkR6+j46eDAJ9dPRkTPM2y23Rxhk0mRqjDHdsuyThAYPMk21BZg8ZTOCYVRB6AFb8BfO4u/Anw4S1dFjPEJnDuf5XONfsC1QnjsrnxzNC+DweA990LRlISDsOUHIopRmtBT4Pv1GYc9aY8Fa6AtNBBNr1245cVW1auGAJ+EbqO8QROo60xJNLZsysmKuvb1YST/tYzQVFF9UWG77aExT5f0BgmhuXUzL9ZhoRqZH16GZ+M8ikPfAja2s0U9vM9f7kxu/Q7ajdTevO2PfMAXD8cWhivHQken3DPCy7KRC/0HXhDNeBgVdR6oUu4uySRiRnJGlF58x3/wzdDMEHBppxbDm3TzjDBJj2RoKJllRjdwOzpAI9AsWk1UQElCpaeykc+AO4F4mctzAtN6xWY1RzXbEaDNlOHkUmz+R5r4TNQJmb1Af3Rwi5+rnj1Wi6IVjBtG/3Z+R83E/IDy1nLJEHrpIel/Z695UXSrlohasr4LRieNdmdYcU5RMAmLyG5miJ7HgN5/lWo5ZhQcH69JeUl20dgB7grKJ8e9qxPXgwg5f3RqBYUr01IciF/nkmsEzD72LWhKEC8CJUJpNiXUGPKPvKwCX0SbN5U1osT4E0oMSKcv+AQKkQTATtFYDyaZmyw05PpJwKe6G5a3wEXV3fwL3i60eIvwkMmqNBAO7XLDYBJJ1tQwFxAE1b0ktlJpYzralaj9w8aUGu9v4h8fOb3UI4pL+L2mgIq+q4ejm+BIS/Fe23a7SMhOH0Uq5cV+AVm4U4DAggikqtYy0Aqqzs1QTAk1pEf0DjlQP4Mo3HabE3qMrsvJO/87Kke0+zffKYny2lYP9CXp59Ivh38vM5OTj8fICt/HxpsO/ISV2X7Bc2+4mbvWf7T7OD7OApefzTm4/v3k7w3R9ZfiG/8+FBeweH2T55J2e8ZHsHT18fHL0g53ROFd97tn+UHezc5Mq4DRfGyTbDZexJavf/Bk0S7mdL/6O/k11IEn9ttj+MRGxdk90fLpE0bo5LB8hD8f+H4v8Pxf8fiv8/FP+/Yi0bFf//hnxkVS0VBZPTF4i4ZoY8z/ZJQfVyJqkqtC93lPlPIKml0YYsZPBp5TpbV+DqgqokK64ZMUwbTQopvjWk7cIewqIYNfGdghiiJQ+ZSTU1y2N3Y0XB7RVfKIpYANW6P2qnE9PVI3deHhz9m9Bi0crjrvqR/+XnVz8fD/VIdEbIPZbrPcy92Tt4/iKBdhCCIVIZ2ftuWyh3uzvIztklRBD3BeAVU4woVskQftRb0Ke6sCrRnJfM4nSPc73n3Ic0zyWUxvF1PvrCe1ZTE+Iub7CgM/vZkAgaCy4D01VchKZXN5junf3sNtPRX281nf3sFtOh3HPz+WLZKUQKeCFqZC6pB1YXxfjdZGnD0tDIpL0d3GDSoe3rT+roulFlOGrgj97oAJw3iufUUFLJosF6gI0GM3UWx4FGoRD3eJ77fprEe/do1w6LTO9REHz/jP8amOKl82BA/1gp4LsQF+9tQ2DuKF1JI9f661GqnCbM1vCK/d6K831m2+WoMQtGg25niCsZPMKRTCZnv7Lcy7f4j883QHrACpxE3/sSUOHD/hMImFIdSo0l6ZFJXtuPOjoElLcqCu7qh1mNAhIRXIIazBNyDsa6Lnayvm6TagKgYZ6UIyikj5akXvp/b0BUN6SnBLUuA4uZDE/b1TjtplD6E7qkxlXugoDc4De4RhzAl5KJr6TEFtjosG8Kqv/2CmiTyXxmTppLMk7RvqJdJ3DeJXx2U8JbcJJsKzTxhWHR1FdRrIY2YyGCJPTqHkjYgjg1Kyg8GqFay+O4YkXCHYJQHCC7ZskWwYXMof4v0jI5gVQUSP4ykuwUMt9pCbyUTRHRt/2n9/9AIiC1Sxkm+HfuV0RinnyqLcLbTFlaFJ/hhc9+SF8iVKrRQwAfZLWSlpG1FWQDStwvu19ucPnjJ/Yo/ijlomS44nA1ntjDgMnmZRGz2IjWaRYAg6Vec5oGX77uPPk5fGJvm2B39TQh2Ty8f+OZNtAXOnNdpzQMzOaO3+eI/K+ezH2wsfoTzeUYMC+5WX++8oKOJxz7atNZHaVtunE9Kt90Hoxm3miO5NXu+I4fFD
K/ACp1DOGV//fA4cLfILG2m5nqfrNHWy+lMp9RmmhNe1TkS6n8fLuBGYwIWAEscq3DgHRyJigX4IXqyQYxmiJUDX8yuB0jU1V00ZdErp3NftU1Ld9g1s6Xm016++lKOmOlbpWEN3JlL5eKgpNMs3/twZIIp+RqAZVcEzVqcUUQhHCTOZuvo9s3+K+BQU6tdBlRq2sJZD/3UkEWEah9PkSe5H//5me+aGZMCYbJbW7+n+JnA1C0v4dLNr0x20FJPPvVp6n96NoTlQB9s1NVy2KY3G60iREGalmgIXpwqmbg7N52pjNZkE+nr/oTQaZATfP7W1Q7Yn8yWfSO+h0nkwUbQSEek+uP42YTuXNf0bo/EzjpsdjufU0XDTk85zUM8Lb4DMOOIPU6bn/3eXHc/xcAAP//7DLksA==" } diff --git a/metricbeat/include/list.go b/metricbeat/include/list.go index e2afb2060f6..147140f3d90 100644 --- a/metricbeat/include/list.go +++ b/metricbeat/include/list.go @@ -15,17 +15,12 @@ // specific language governing permissions and limitations // under the License. -// Code generated by 'make imports' - DO NOT EDIT. +// Code generated by beats/dev-tools/module_include_list/module_include_list.go - DO NOT EDIT. -/* -Package include imports all Module and MetricSet packages so that they register -their factories with the global registry. This package can be imported in the -main package to automatically register all of the standard supported Metricbeat -modules. -*/ package include import ( + // Import packages that need to register themselves. _ "github.com/elastic/beats/metricbeat/module/aerospike" _ "github.com/elastic/beats/metricbeat/module/aerospike/namespace" _ "github.com/elastic/beats/metricbeat/module/apache" @@ -102,7 +97,6 @@ import ( _ "github.com/elastic/beats/metricbeat/module/kubernetes/state_replicaset" _ "github.com/elastic/beats/metricbeat/module/kubernetes/state_statefulset" _ "github.com/elastic/beats/metricbeat/module/kubernetes/system" - _ "github.com/elastic/beats/metricbeat/module/kubernetes/util" _ "github.com/elastic/beats/metricbeat/module/kubernetes/volume" _ "github.com/elastic/beats/metricbeat/module/kvm" _ "github.com/elastic/beats/metricbeat/module/kvm/dommemstat" diff --git a/metricbeat/magefile.go b/metricbeat/magefile.go index 8e47869d968..3beb4b2a468 100644 --- a/metricbeat/magefile.go +++ b/metricbeat/magefile.go @@ -20,178 +20,34 @@ package main import ( - "context" - "fmt" - "regexp" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" - "github.com/pkg/errors" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + metricbeat "github.com/elastic/beats/metricbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Metricbeat is a lightweight shipper for metrics." -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// CrossBuildXPack cross-builds the beat with XPack for all target platforms. 
-func CrossBuildXPack() error { - return mage.CrossBuildXPack() -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatPackaging() - customizePackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildXPack, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) + mage.BeatProjectType = mage.OSSProject } -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages(mage.WithModulesD()) -} - -// Update updates the generated files (aka make update). -func Update() error { - return sh.Run("make", "update") -} - -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML("module") -} - -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} - -// ExportDashboard exports a dashboard and writes it into the correct directory -// -// Required ENV variables: -// * MODULE: Name of the module -// * ID: Dashboard id -func ExportDashboard() error { - return mage.ExportDashboard() -} - -// ----------------------------------------------------------------------------- -// Customizations specific to Metricbeat. -// - Include modules.d directory in packages. -// - Disable system/load metricset for Windows. - -// customizePackaging modifies the package specs to add the modules.d directory. -// And for Windows it comments out the system/load metricset because it's -// not supported. 
-func customizePackaging() { - var ( - archiveModulesDir = "modules.d" - unixModulesDir = "/etc/{{.BeatName}}/modules.d" - - modulesDir = mage.PackageFile{ - Mode: 0644, - Source: "modules.d", - Config: true, - Modules: true, - } - windowsModulesDir = mage.PackageFile{ - Mode: 0644, - Source: "{{.PackageDir}}/modules.d", - Config: true, - Modules: true, - Dep: func(spec mage.PackageSpec) error { - if err := mage.Copy("modules.d", spec.MustExpand("{{.PackageDir}}/modules.d")); err != nil { - return errors.Wrap(err, "failed to copy modules.d dir") - } - - return mage.FindReplace( - spec.MustExpand("{{.PackageDir}}/modules.d/system.yml"), - regexp.MustCompile(`- load`), `#- load`) - }, - } - windowsReferenceConfig = mage.PackageFile{ - Mode: 0644, - Source: "{{.PackageDir}}/metricbeat.reference.yml", - Dep: func(spec mage.PackageSpec) error { - err := mage.Copy("metricbeat.reference.yml", - spec.MustExpand("{{.PackageDir}}/metricbeat.reference.yml")) - if err != nil { - return errors.Wrap(err, "failed to copy reference config") - } - - return mage.FindReplace( - spec.MustExpand("{{.PackageDir}}/metricbeat.reference.yml"), - regexp.MustCompile(`- load`), `#- load`) - }, - } - ) - - for _, args := range mage.Packages { - switch args.OS { - case "windows": - args.Spec.Files[archiveModulesDir] = windowsModulesDir - args.Spec.ReplaceFile("{{.BeatName}}.reference.yml", windowsReferenceConfig) - default: - pkgType := args.Types[0] - switch pkgType { - case mage.TarGz, mage.Zip, mage.Docker: - args.Spec.Files[archiveModulesDir] = modulesDir - case mage.Deb, mage.RPM, mage.DMG: - args.Spec.Files[unixModulesDir] = modulesDir - default: - panic(errors.Errorf("unhandled package type: %v", pkgType)) - } - } - } -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(metricbeat.Update.All) } diff --git a/metricbeat/metricbeat.reference.yml b/metricbeat/metricbeat.reference.yml index f1a7c6563c2..3136cc4bb61 100644 --- a/metricbeat/metricbeat.reference.yml +++ b/metricbeat/metricbeat.reference.yml @@ -44,10 +44,10 @@ metricbeat.max_start_delay: 10s # period: 10s # hosts: ["${host}:2379"] -#========================== Modules configuration ============================ +#========================== Modules configuration ============================= metricbeat.modules: -#------------------------------- System Module ------------------------------- +#-------------------------------- System Module -------------------------------- - module: system metricsets: - cpu # CPU usage @@ -122,14 +122,14 @@ metricbeat.modules: # Diskio configurations #diskio.include_devices: [] -#------------------------------ Aerospike Module ----------------------------- +#------------------------------ Aerospike Module ------------------------------ - module: aerospike metricsets: ["namespace"] enabled: true period: 10s hosts: ["localhost:3000"] -#------------------------------- Apache Module ------------------------------- +#-------------------------------- Apache Module -------------------------------- - module: apache metricsets: ["status"] period: 10s @@ -147,21 +147,21 @@ metricbeat.modules: # Password of hosts. 
Empty by default #password: password -#-------------------------------- Ceph Module -------------------------------- +#--------------------------------- Ceph Module --------------------------------- - module: ceph metricsets: ["cluster_disk", "cluster_health", "monitor_health", "pool_disk", "osd_tree"] period: 10s hosts: ["localhost:5000"] enabled: true -#------------------------------ Couchbase Module ----------------------------- +#------------------------------ Couchbase Module ------------------------------ - module: couchbase metricsets: ["bucket", "cluster", "node"] period: 10s hosts: ["localhost:8091"] enabled: true -#------------------------------- Docker Module ------------------------------- +#-------------------------------- Docker Module -------------------------------- - module: docker metricsets: - "container" @@ -188,7 +188,7 @@ metricbeat.modules: #certificate: "/etc/pki/client/cert.pem" #key: "/etc/pki/client/cert.key" -#----------------------------- Dropwizard Module ----------------------------- +#------------------------------ Dropwizard Module ------------------------------ - module: dropwizard metricsets: ["collector"] period: 10s @@ -197,7 +197,7 @@ metricbeat.modules: namespace: example enabled: true -#---------------------------- Elasticsearch Module --------------------------- +#---------------------------- Elasticsearch Module ---------------------------- - module: elasticsearch metricsets: - node @@ -216,19 +216,19 @@ metricbeat.modules: # Set to false to fetch all entries #index_recovery.active_only: true -#----------------------------- envoyproxy Module ----------------------------- +#------------------------------ Envoyproxy Module ------------------------------ - module: envoyproxy metricsets: ["server"] period: 10s hosts: ["localhost:9901"] -#-------------------------------- Etcd Module -------------------------------- +#--------------------------------- Etcd Module --------------------------------- - module: etcd metricsets: ["leader", "self", "store"] period: 10s hosts: ["localhost:2379"] -#------------------------------- Golang Module ------------------------------- +#-------------------------------- Golang Module -------------------------------- - module: golang #metricsets: # - expvar @@ -240,7 +240,7 @@ metricbeat.modules: namespace: "example" path: "/debug/vars" -#------------------------------ Graphite Module ------------------------------ +#------------------------------- Graphite Module ------------------------------- - module: graphite metricsets: ["server"] enabled: true @@ -264,14 +264,14 @@ metricbeat.modules: # delimiter: "_" -#------------------------------- HAProxy Module ------------------------------ +#------------------------------- HAProxy Module ------------------------------- - module: haproxy metricsets: ["info", "stat"] period: 10s hosts: ["tcp://127.0.0.1:14567"] enabled: true -#-------------------------------- HTTP Module -------------------------------- +#--------------------------------- HTTP Module --------------------------------- - module: http #metricsets: # - json @@ -300,7 +300,7 @@ metricbeat.modules: # fields: # added to the the response in root. 
overwrites existing fields # key: "value" -#------------------------------- Jolokia Module ------------------------------ +#------------------------------- Jolokia Module ------------------------------- - module: jolokia #metricsets: ["jmx"] period: 10s @@ -331,7 +331,7 @@ metricbeat.modules: jmx.application: jmx.instance: -#-------------------------------- Kafka Module ------------------------------- +#-------------------------------- Kafka Module -------------------------------- - module: kafka metricsets: ["consumergroup", "partition"] period: 10s @@ -359,7 +359,7 @@ metricbeat.modules: #username: "" #password: "" -#------------------------------- Kibana Module ------------------------------- +#-------------------------------- Kibana Module -------------------------------- - module: kibana metricsets: ["status"] period: 10s @@ -367,7 +367,7 @@ metricbeat.modules: basepath: "" enabled: true -#----------------------------- Kubernetes Module ----------------------------- +#------------------------------ Kubernetes Module ------------------------------ # Node metrics, from kubelet: - module: kubernetes metricsets: @@ -425,7 +425,7 @@ metricbeat.modules: - apiserver hosts: ["https://${KUBERNETES_SERVICE_HOST}:${KUBERNETES_SERVICE_PORT}"] -#--------------------------------- kvm Module -------------------------------- +#--------------------------------- Kvm Module --------------------------------- - module: kvm metricsets: ["dommemstat"] enabled: true @@ -438,21 +438,21 @@ metricbeat.modules: # Timeout to connect to Libvirt server #timeout: 1s -#------------------------------ Logstash Module ------------------------------ +#------------------------------- Logstash Module ------------------------------- - module: logstash metricsets: ["node", "node_stats"] enabled: true period: 10s hosts: ["localhost:9600"] -#------------------------------ Memcached Module ----------------------------- +#------------------------------ Memcached Module ------------------------------ - module: memcached metricsets: ["stats"] period: 10s hosts: ["localhost:11211"] enabled: true -#------------------------------- MongoDB Module ------------------------------ +#------------------------------- MongoDB Module ------------------------------- - module: mongodb metricsets: ["dbstats", "status", "collstats", "metrics", "replstatus"] period: 10s @@ -486,7 +486,7 @@ metricbeat.modules: # Password to use when connecting to MongoDB. Empty by default. #password: pass -#-------------------------------- Munin Module ------------------------------- +#-------------------------------- Munin Module -------------------------------- - module: munin metricsets: ["node"] enabled: true @@ -494,7 +494,7 @@ metricbeat.modules: hosts: ["localhost:4949"] node.namespace: node -#-------------------------------- MySQL Module ------------------------------- +#-------------------------------- MySQL Module -------------------------------- - module: mysql metricsets: - "status" @@ -515,7 +515,7 @@ metricbeat.modules: # By setting raw to true, all raw fields from the status metricset will be added to the event. #raw: false -#-------------------------------- Nginx Module ------------------------------- +#-------------------------------- Nginx Module -------------------------------- - module: nginx metricsets: ["stubstatus"] enabled: true @@ -527,7 +527,7 @@ metricbeat.modules: # Path to server status. 
Default server-status server_status_path: "server-status" -#------------------------------- PHP_FPM Module ------------------------------ +#------------------------------- PHP_FPM Module ------------------------------- - module: php_fpm metricsets: - pool @@ -537,7 +537,7 @@ metricbeat.modules: status_path: "/status" hosts: ["localhost:8080"] -#----------------------------- PostgreSQL Module ----------------------------- +#------------------------------ PostgreSQL Module ------------------------------ - module: postgresql enabled: true metricsets: @@ -564,7 +564,7 @@ metricbeat.modules: # Password to use when connecting to PostgreSQL. Empty by default. #password: pass -#----------------------------- Prometheus Module ----------------------------- +#------------------------------ Prometheus Module ------------------------------ - module: prometheus metricsets: ["stats"] enabled: true @@ -586,7 +586,7 @@ metricbeat.modules: #ssl.certificate_authorities: # - /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt -#------------------------------ RabbitMQ Module ------------------------------ +#------------------------------- RabbitMQ Module ------------------------------- - module: rabbitmq metricsets: ["node", "queue", "connection"] enabled: true @@ -600,7 +600,7 @@ metricbeat.modules: #username: guest #password: guest -#-------------------------------- Redis Module ------------------------------- +#-------------------------------- Redis Module -------------------------------- - module: redis metricsets: ["info", "keyspace"] enabled: true @@ -632,20 +632,20 @@ metricbeat.modules: # Redis AUTH password. Empty by default. #password: foobared -#------------------------------- traefik Module ------------------------------ +#------------------------------- Traefik Module ------------------------------- - module: traefik metricsets: ["health"] period: 10s hosts: ["localhost:8080"] -#-------------------------------- uwsgi Module ------------------------------- +#-------------------------------- Uwsgi Module -------------------------------- - module: uwsgi metricsets: ["status"] enable: true period: 10s hosts: ["tcp://127.0.0.1:9191"] -#------------------------------- vSphere Module ------------------------------ +#------------------------------- VSphere Module ------------------------------- - module: vsphere enabled: true metricsets: ["datastore", "host", "virtualmachine"] @@ -659,7 +659,7 @@ metricbeat.modules: # Get custom fields when using virtualmachine metric set. Default false. 
# get_custom_fields: false -#------------------------------- Windows Module ------------------------------ +#------------------------------- Windows Module ------------------------------- - module: windows metricsets: ["perfmon"] enabled: true @@ -676,7 +676,7 @@ metricbeat.modules: enabled: true period: 60s -#------------------------------ ZooKeeper Module ----------------------------- +#------------------------------ ZooKeeper Module ------------------------------ - module: zookeeper enabled: true metricsets: ["mntr"] diff --git a/metricbeat/module/system/_meta/config.reference.yml b/metricbeat/module/system/_meta/config.reference.yml index c22fc6ac279..e230c6e5018 100644 --- a/metricbeat/module/system/_meta/config.reference.yml +++ b/metricbeat/module/system/_meta/config.reference.yml @@ -1,7 +1,9 @@ - module: system metricsets: - cpu # CPU usage +{{- if ne .GOOS "windows" }} - load # CPU load averages +{{- end }} - memory # Memory usage - network # Network IO - process # Per process metrics @@ -13,7 +15,9 @@ #- filesystem # File system usage for each mountpoint #- fsstat # File system summary metrics #- raid # Raid +{{- if eq .GOOS "linux" }} #- socket # Sockets and connection info (linux only) +{{- end }} enabled: true period: 10s processes: ['.*'] diff --git a/metricbeat/module/system/_meta/config.yml b/metricbeat/module/system/_meta/config.yml index 0ecf5c0a148..a913835c984 100644 --- a/metricbeat/module/system/_meta/config.yml +++ b/metricbeat/module/system/_meta/config.yml @@ -2,7 +2,9 @@ period: 10s metricsets: - cpu +{{- if ne .GOOS "windows" }} - load +{{- end }} - memory - network - process @@ -10,7 +12,9 @@ - socket_summary #- core #- diskio +{{- if eq .GOOS "linux" }} #- socket +{{- end }} process.include_top_n: by_cpu: 5 # include top 5 processes by CPU by_memory: 5 # include top 5 processes by memory diff --git a/metricbeat/scripts/generate_imports_helper.py b/metricbeat/scripts/generate_imports_helper.py deleted file mode 100644 index 8a4fe83e25e..00000000000 --- a/metricbeat/scripts/generate_imports_helper.py +++ /dev/null @@ -1,27 +0,0 @@ -from os.path import abspath, isdir, join -from os import listdir - -comment = """Package include imports all Module and MetricSet packages so that they register -their factories with the global registry. This package can be imported in the -main package to automatically register all of the standard supported Metricbeat -modules.""" - - -def get_importable_lines(go_beat_path, import_line): - path = abspath("module") - - imported_lines = [] - modules = [m for m in listdir(path) if isdir(join(path, m)) and m != "_meta"] - for module in modules: - module_import = import_line.format(beat_path=go_beat_path, module="module", name=module) - imported_lines.append(module_import) - - module_path = join(path, module) - ignore = ["_meta", "vendor", "mtest"] - metricsets = [m for m in listdir(module_path) if isdir(join(module_path, m)) and m not in ignore] - for metricset in metricsets: - metricset_name = "{}/{}".format(module, metricset) - metricset_import = import_line.format(beat_path=go_beat_path, module="module", name=metricset_name) - imported_lines.append(metricset_import) - - return sorted(imported_lines) diff --git a/metricbeat/scripts/mage/config.go b/metricbeat/scripts/mage/config.go new file mode 100644 index 00000000000..434bf159016 --- /dev/null +++ b/metricbeat/scripts/mage/config.go @@ -0,0 +1,64 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. 
See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/elastic/beats/dev-tools/mage" +) + +const ( + modulesConfigYml = "build/config.modules.yml" +) + +// config generates short/reference/docker configs. +func config() error { + var args mage.ConfigFileParams + switch mage.BeatProjectType { + case mage.OSSProject, mage.CommunityProject: + args = configFileParams(mage.OSSBeatDir("module")) + case mage.XPackProject: + args = configFileParams(mage.OSSBeatDir("module"), "module") + default: + panic(mage.ErrUnknownProjectType) + } + return mage.Config(mage.AllConfigTypes, args, ".") +} + +func configFileParams(moduleDirs ...string) mage.ConfigFileParams { + collectModuleConfig := func() error { + return mage.GenerateModuleReferenceConfig(modulesConfigYml, moduleDirs...) + } + + return mage.ConfigFileParams{ + ShortParts: []string{ + mage.OSSBeatDir("_meta/common.yml"), + mage.OSSBeatDir("_meta/setup.yml"), + mage.LibbeatDir("_meta/config.yml"), + }, + ReferenceDeps: []interface{}{collectModuleConfig}, + ReferenceParts: []string{ + mage.OSSBeatDir("_meta/common.reference.yml"), + modulesConfigYml, + mage.LibbeatDir("_meta/config.reference.yml"), + }, + DockerParts: []string{ + mage.OSSBeatDir("_meta/beat.docker.yml"), + mage.LibbeatDir("_meta/config.docker.yml"), + }, + } +} diff --git a/metricbeat/scripts/mage/fields.go b/metricbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..d0b9cee514f --- /dev/null +++ b/metricbeat/scripts/mage/fields.go @@ -0,0 +1,100 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + "go.uber.org/multierr" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch mage.BeatProjectType { + case mage.OSSProject, mage.CommunityProject: + return multierr.Combine( + b.commonFieldsGo(), + b.moduleFieldsGo(), + ) + case mage.XPackProject: + return b.moduleFieldsGo() + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) FieldsYML() error { + var modules []string + switch mage.BeatProjectType { + case mage.OSSProject, mage.CommunityProject: + modules = append(modules, + mage.OSSBeatDir("module"), + ) + case mage.XPackProject: + modules = append(modules, + mage.OSSBeatDir("module"), + mage.XPackBeatDir("module"), + ) + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + switch mage.BeatProjectType { + case mage.CommunityProject: + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML, "module") + case mage.OSSProject, mage.XPackProject: + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML, + mage.OSSBeatDir("module"), + mage.XPackBeatDir("module"), + ) + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} + +func (b fieldsBuilder) moduleFieldsGo() error { + return mage.GenerateModuleFieldsGo("module") +} diff --git a/metricbeat/scripts/mage/generate.go b/metricbeat/scripts/mage/generate.go new file mode 100644 index 00000000000..57c270f6b79 --- /dev/null +++ b/metricbeat/scripts/mage/generate.go @@ -0,0 +1,65 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "os" + "path/filepath" + + "github.com/magefile/mage/sh" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" +) + +// GenerateMetricSet generates a new MetricSet. It will create the module too +// if it does not already exist. 
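+//
+// The module and metricset names are read from the MODULE and METRICSET
+// environment variables, e.g. MODULE=foo METRICSET=status mage generateMetricSet
+// (foo and status are placeholder names).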
+func GenerateMetricSet() error { + module, metricset := os.Getenv("MODULE"), os.Getenv("METRICSET") + if module == "" { + return errors.New("MODULE must be set") + } + if metricset == "" { + return errors.New("METRICSET must be set") + } + + ve, err := mage.PythonVirtualenv() + if err != nil { + return err + } + + pythonPath, err := mage.LookVirtualenvPath(ve, "python") + if err != nil { + return err + } + + elasticBeats, err := mage.ElasticBeatsDir() + if err != nil { + return err + } + + // TODO: Port this script to Go. + return sh.RunV( + pythonPath, + filepath.Join(elasticBeats, "metricbeat/scripts/create_metricset.py"), + "--path", mage.CWD(), + "--es_beats", elasticBeats, + "--module", module, + "--metricset", metricset, + ) +} diff --git a/metricbeat/scripts/mage/package.go b/metricbeat/scripts/mage/package.go new file mode 100644 index 00000000000..a26872097f0 --- /dev/null +++ b/metricbeat/scripts/mage/package.go @@ -0,0 +1,113 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "fmt" + "path/filepath" + "time" + + "github.com/magefile/mage/mg" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" +) + +func init() { + mage.BeatDescription = "Metricbeat is a lightweight shipper for metrics." +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() + + switch mage.BeatProjectType { + case mage.OSSProject: + mage.UseElasticBeatOSSPackaging() + case mage.XPackProject: + mage.UseElasticBeatXPackPackaging() + case mage.CommunityProject: + mage.UseCommunityBeatPackaging() + } + mage.PackageKibanaDashboardsFromBuildDir() + customizePackaging() + + mg.Deps(Update.All) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} + +// ----------------------------------------------------------------------------- +// Customizations specific to Metricbeat. +// - Include modules.d directory in packages. +// - Disable system/load metricset for Windows. + +// customizePackaging modifies the package specs to add the modules.d directory. +// And for Windows it comments out the system/load metricset because it's +// not supported. 
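+// The per-OS adjustments are applied through the GOOS template variable that
+// is passed to GenerateDirModulesD for each package spec.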
+func customizePackaging() { + const shortConfigGlob = "modules/*/_meta/config.yml" + inputGlobs := []string{mage.OSSBeatDir(shortConfigGlob)} + if mage.BeatProjectType == mage.XPackProject { + inputGlobs = append(inputGlobs, mage.XPackBeatDir(shortConfigGlob)) + } + + var ( + modulesDTarget = "modules.d" + modulesD = mage.PackageFile{ + Mode: 0644, + Source: "{{.PackageDir}}/modules.d", + Config: true, + Modules: true, + Dep: func(spec mage.PackageSpec) error { + packageDir := spec.MustExpand("{{.PackageDir}}") + targetDir := filepath.Join(packageDir, "modules.d") + return mage.GenerateDirModulesD( + mage.InputGlobs(inputGlobs...), + mage.OutputDir(targetDir), + mage.SetTemplateVariable("GOOS", spec.OS), + mage.SetTemplateVariable("GOARCH", mage.MustExpand("{{.GOARCH}}")), + mage.EnableModule("system"), + ) + }, + } + ) + + for _, args := range mage.Packages { + for _, pkgType := range args.Types { + switch pkgType { + case mage.TarGz, mage.Zip, mage.Docker: + args.Spec.Files[modulesDTarget] = modulesD + case mage.Deb, mage.RPM: + args.Spec.Files["/etc/{{.BeatName}}/"+modulesDTarget] = modulesD + case mage.DMG: + args.Spec.Files["/etc/{{.BeatName}}/"+modulesDTarget] = modulesD + default: + panic(errors.Errorf("unhandled package type: %v", pkgType)) + } + + break + } + } +} diff --git a/metricbeat/scripts/mage/update.go b/metricbeat/scripts/mage/update.go new file mode 100644 index 00000000000..b261e3e76e3 --- /dev/null +++ b/metricbeat/scripts/mage/update.go @@ -0,0 +1,146 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "log" + "os" + "os/exec" + "strings" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + "github.com/elastic/beats/dev-tools/mage/target/integtest" + "github.com/elastic/beats/dev-tools/mage/target/unittest" +) + +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs, Update.ModuleDocs) + + unittest.RegisterPythonTestDeps(Update.Fields) + + integtest.RegisterPythonTestDeps(Update.Fields, Update.Dashboards) +} + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields, Update.Config, Update.Dashboards, + Update.Includes, Update.ModulesD) +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. 
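+// For X-Pack builds the dashboards are collected from both the OSS and the
+// X-Pack module directories.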
+func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + switch mage.BeatProjectType { + case mage.OSSProject, mage.CommunityProject: + return mage.KibanaDashboards(mage.OSSBeatDir("module")) + case mage.XPackProject: + return mage.KibanaDashboards(mage.OSSBeatDir("module"), + mage.XPackBeatDir("module")) + default: + panic(mage.ErrUnknownProjectType) + } +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// Includes updates include/list.go. +func (Update) Includes() error { + mg.Deps(Update.Fields) + return mage.GenerateIncludeListGo(nil, []string{"module"}) +} + +// ModulesD updates the modules.d directory. +func (Update) ModulesD() error { + // Only generate modules.d if there is a module dir. Newly generated + // beats based on Metricbeat initially do not have a module dir. + if _, err := os.Stat("module"); err == nil { + return mage.GenerateDirModulesD(mage.EnableModule("system")) + } + return nil +} + +// FieldDocs updates the field documentation. +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} + +// ModuleDocs collects documentation from modules (both OSS and X-Pack). +func (Update) ModuleDocs() error { + // Warning: This script does NOT work outside of the OSS Metricbeat + // directory because it was not written in a portable manner. + if mage.BeatProjectType == mage.CommunityProject { + return nil + } + + ve, err := mage.PythonVirtualenv() + if err != nil { + return err + } + + python, err := mage.LookVirtualenvPath(ve, "python") + if err != nil { + return err + } + + if err = os.RemoveAll(mage.OSSBeatDir("docs/modules")); err != nil { + return err + } + if err = os.MkdirAll(mage.OSSBeatDir("docs/modules"), 0755); err != nil { + return err + } + + // TODO: Port this script to Go. + return runIn(mage.OSSBeatDir(), python, + mage.OSSBeatDir("scripts/docs_collector.py"), + "--beat", mage.BeatName) +} + +func runIn(dir, cmd string, args ...string) error { + c := exec.Command(cmd, args...) + c.Dir = dir + c.Env = os.Environ() + c.Stderr = os.Stderr + if mg.Verbose() { + c.Stdout = os.Stdout + } + c.Stdin = os.Stdin + log.Printf("exec: (pwd=%v) %v %v", dir, cmd, strings.Join(args, " ")) + return c.Run() +} diff --git a/metricbeat/tests/system/test_base.py b/metricbeat/tests/system/test_base.py index d64b0c94cae..fd84ba09f05 100644 --- a/metricbeat/tests/system/test_base.py +++ b/metricbeat/tests/system/test_base.py @@ -58,7 +58,7 @@ def test_dashboards(self): Test that the dashboards can be loaded with `setup --dashboards` """ - kibana_dir = os.path.join(self.beat_path, "_meta", "kibana.generated") + kibana_dir = os.path.join(self.beat_path, "build", "kibana") shutil.copytree(kibana_dir, os.path.join(self.working_dir, "kibana")) es = Elasticsearch([self.get_elasticsearch_url()]) diff --git a/x-pack/metricbeat/Makefile b/x-pack/metricbeat/Makefile index 56633e2b3e5..7427a5c672b 100644 --- a/x-pack/metricbeat/Makefile +++ b/x-pack/metricbeat/Makefile @@ -1,3 +1,4 @@ -ES_BEATS ?= ../.. - -include $(ES_BEATS)/dev-tools/make/xpack.mk +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/metricbeat/magefile.go b/x-pack/metricbeat/magefile.go index 1f4635fcc6a..2b070dceaa0 100644 --- a/x-pack/metricbeat/magefile.go +++ b/x-pack/metricbeat/magefile.go @@ -1,274 +1,39 @@ // Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one // or more contributor license agreements. 
Licensed under the Elastic License; // you may not use this file except in compliance with the Elastic License. - // +build mage package main import ( - "context" - "fmt" - "os" - "path/filepath" - "regexp" - "strings" - "time" - "github.com/magefile/mage/mg" "github.com/elastic/beats/dev-tools/mage" -) - -func init() { - mage.BeatDescription = "Metricbeat is a lightweight shipper for metrics." - mage.BeatLicense = "Elastic License" -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use BEAT_VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.LoadLocalNamedSpec("xpack") - - mg.Deps(Update, prepareModulePackaging) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages(mage.WithModulesD()) -} - -// Fields generates a fields.yml and fields.go for each module. -func Fields() { - mg.Deps(fieldsYML, moduleFieldsGo) -} - -func moduleFieldsGo() error { - return mage.GenerateModuleFieldsGo("module") -} - -// fieldsYML generates a fields.yml based on filebeat + x-pack/filebeat/modules. -func fieldsYML() error { - return mage.GenerateFieldsYAML(mage.OSSBeatDir("module"), "module") -} - -// Dashboards collects all the dashboards and generates index patterns. -func Dashboards() error { - return mage.KibanaDashboards(mage.OSSBeatDir("module"), "module") -} - -// Config generates both the short and reference configs. -func Config() { - mg.Deps(shortConfig, referenceConfig, createDirModulesD) -} -// Update is an alias for running fields, dashboards, config. -func Update() { - mg.SerialDeps(Fields, Dashboards, Config, prepareModulePackaging, - mage.GenerateModuleIncludeListGo) -} - -// Fmt formats source code and adds file headers. -func Fmt() { - mg.Deps(mage.Format) -} - -// Check runs fmt and update then returns an error if any modifications are found. -func Check() { - mg.SerialDeps(mage.Format, Update, mage.Check) -} - -// IntegTest executes integration tests (it uses Docker to run the tests). -func IntegTest() { - mage.AddIntegTestUsage() - defer mage.StopIntegTestEnv() - mg.SerialDeps(GoIntegTest, PythonIntegTest) -} - -// UnitTest executes the unit tests. -func UnitTest() { - mg.SerialDeps(GoUnitTest, PythonUnitTest) -} - -// GoUnitTest executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. 
-func GoUnitTest(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoIntegTest executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoIntegTest(ctx context.Context) error { - return mage.RunIntegTest("goIntegTest", func() error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) - }) -} - -// PythonUnitTest executes the python system tests. -func PythonUnitTest() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) -} - -// PythonIntegTest executes the python system tests in the integration environment (Docker). -func PythonIntegTest(ctx context.Context) error { - if !mage.IsInIntegTestEnv() { - mg.Deps(Fields) - } - return mage.RunIntegTest("pythonIntegTest", func() error { - mg.Deps(mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestIntegrationArgs()) - }) -} - -// ----------------------------------------------------------------------------- -// Customizations specific to Metricbeat. -// - Include modules.d directory in packages. - -const ( - dirModulesDGenerated = "build/package/modules.d" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + metricbeat "github.com/elastic/beats/metricbeat/scripts/mage" ) -// prepareModulePackaging generates modules and modules.d directories -// for an x-pack distribution, excluding _meta and test files so that they are -// not included in packages. -func prepareModulePackaging() error { - mg.Deps(createDirModulesD) - - err := mage.Clean([]string{ - dirModulesDGenerated, - }) - if err != nil { - return err - } - - for _, copyAction := range []struct { - src, dst string - }{ - {mage.OSSBeatDir("modules.d"), dirModulesDGenerated}, - {"modules.d", dirModulesDGenerated}, - } { - err := (&mage.CopyTask{ - Source: copyAction.src, - Dest: copyAction.dst, - Mode: 0644, - DirMode: 0755, - }).Execute() - if err != nil { - return err - } - } - return nil -} - -func shortConfig() error { - var configParts = []string{ - mage.OSSBeatDir("_meta/common.yml"), - mage.OSSBeatDir("_meta/setup.yml"), - "{{ elastic_beats_dir }}/libbeat/_meta/config.yml", - } - - for i, f := range configParts { - configParts[i] = mage.MustExpand(f) - } - - configFile := mage.BeatName + ".yml" - mage.MustFileConcat(configFile, 0640, configParts...) 
- mage.MustFindReplace(configFile, regexp.MustCompile("beatname"), mage.BeatName) - mage.MustFindReplace(configFile, regexp.MustCompile("beat-index-prefix"), mage.BeatIndexPrefix) - return nil -} - -func referenceConfig() error { - const modulesConfigYml = "build/config.modules.yml" - err := mage.GenerateModuleReferenceConfig(modulesConfigYml, mage.OSSBeatDir("module"), "module") - if err != nil { - return err - } - defer os.Remove(modulesConfigYml) - - var configParts = []string{ - mage.OSSBeatDir("_meta/common.reference.yml"), - modulesConfigYml, - "{{ elastic_beats_dir }}/libbeat/_meta/config.reference.yml", - } - - for i, f := range configParts { - configParts[i] = mage.MustExpand(f) - } +func init() { + mage.BeatProjectType = mage.XPackProject - configFile := mage.BeatName + ".reference.yml" - mage.MustFileConcat(configFile, 0640, configParts...) - mage.MustFindReplace(configFile, regexp.MustCompile("beatname"), mage.BeatName) - mage.MustFindReplace(configFile, regexp.MustCompile("beat-index-prefix"), mage.BeatIndexPrefix) - return nil + mage.BeatLicense = "Elastic License" } -func createDirModulesD() error { - if err := os.RemoveAll("modules.d"); err != nil { - return err - } - - shortConfigs, err := filepath.Glob("module/*/_meta/config.yml") - if err != nil { - return err - } - - for _, f := range shortConfigs { - parts := strings.Split(filepath.ToSlash(f), "/") - if len(parts) < 2 { - continue - } - moduleName := parts[1] - - cp := mage.CopyTask{ - Source: f, - Dest: filepath.Join("modules.d", moduleName+".yml.disabled"), - Mode: 0644, - } - if err = cp.Execute(); err != nil { - return err - } - } - return nil -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(metricbeat.Update.All) } diff --git a/x-pack/metricbeat/metricbeat.docker.yml b/x-pack/metricbeat/metricbeat.docker.yml new file mode 100644 index 00000000000..982018eefc7 --- /dev/null +++ b/x-pack/metricbeat/metricbeat.docker.yml @@ -0,0 +1,11 @@ +metricbeat.config.modules: + path: ${path.config}/modules.d/*.yml + reload.enabled: false + +processors: +- add_cloud_metadata: ~ + +output.elasticsearch: + hosts: '${ELASTICSEARCH_HOSTS:elasticsearch:9200}' + username: '${ELASTICSEARCH_USERNAME:}' + password: '${ELASTICSEARCH_PASSWORD:}' diff --git a/x-pack/metricbeat/modules.d/mssql.yml.disabled b/x-pack/metricbeat/modules.d/mssql.yml.disabled index 4008489e10d..97e8c8ad3ab 100644 --- a/x-pack/metricbeat/modules.d/mssql.yml.disabled +++ b/x-pack/metricbeat/modules.d/mssql.yml.disabled @@ -1,3 +1,6 @@ +# Module: mssql +# Docs: https://www.elastic.co/guide/en/beats/metricbeat/master/metricbeat-module-mssql.html + - module: mssql metricsets: - "db" diff --git a/x-pack/metricbeat/packages.yml b/x-pack/metricbeat/packages.yml deleted file mode 100644 index a0cf5267533..00000000000 --- a/x-pack/metricbeat/packages.yml +++ /dev/null @@ -1,90 +0,0 @@ -specs: - xpack: - - os: windows - types: [zip] - spec: - <<: *windows_binary_spec - <<: *elastic_license_for_binaries - files: - modules.d: - mode: 0644 - source: build/package/modules.d - config: true - kibana: - source: build/kibana - mode: 0644 - - - os: darwin - types: [tgz] - spec: - <<: *binary_spec - <<: *elastic_license_for_binaries - files: - modules.d: - mode: 0644 - source: build/package/modules.d - config: true - kibana: - source: build/kibana - mode: 0644 - - - os: darwin - types: [dmg] - spec: - <<: *macos_beat_pkg_spec - <<: *elastic_license_for_macos_pkg - files: - 
/etc/{{.BeatName}}/modules.d: - mode: 0644 - source: build/package/modules.d - config: true - '/Library/Application Support/{{.BeatVendor}}/{{.BeatName}}/kibana': - source: build/kibana - mode: 0644 - - - os: linux - types: [tgz] - spec: - <<: *binary_spec - <<: *elastic_license_for_binaries - files: - modules.d: - mode: 0644 - source: build/package/modules.d - config: true - kibana: - source: build/kibana - mode: 0644 - - - os: linux - types: [deb, rpm] - spec: - <<: *deb_rpm_spec - <<: *elastic_license_for_deb_rpm - files: - '/etc/{{.BeatName}}/modules.d': - mode: 0644 - source: build/package/modules.d - config: true - '/usr/share/{{.BeatName}}/kibana': - source: build/kibana - mode: 0644 - - - os: linux - types: [docker] - spec: - <<: *docker_spec - <<: *elastic_docker_spec - <<: *elastic_license_for_binaries - files: - '{{.BeatName}}.yml': - source: '../../metricbeat/metricbeat.docker.yml' - mode: 0600 - config: true - modules.d: - mode: 0644 - source: build/package/modules.d - config: true - kibana: - source: build/kibana - mode: 0644 From 442c241b432372eda01478a0d5bbaa3c6ccfe544 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:57:18 -0500 Subject: [PATCH 09/18] Refactor packetbeat build logic --- packetbeat/Makefile | 20 +- packetbeat/include/fields.go | 2 +- packetbeat/magefile.go | 498 +----- packetbeat/scripts/mage/build.go | 339 ++++ packetbeat/scripts/mage/config.go | 37 +- packetbeat/scripts/mage/fields.go | 86 ++ packetbeat/scripts/mage/package.go | 104 ++ packetbeat/scripts/mage/update.go | 85 + x-pack/packetbeat/Makefile | 4 + x-pack/packetbeat/magefile.go | 34 + x-pack/packetbeat/make.bat | 11 + x-pack/packetbeat/packetbeat.docker.yml | 42 + x-pack/packetbeat/packetbeat.reference.yml | 1630 ++++++++++++++++++++ x-pack/packetbeat/packetbeat.yml | 234 +++ 14 files changed, 2617 insertions(+), 509 deletions(-) create mode 100644 packetbeat/scripts/mage/build.go create mode 100644 packetbeat/scripts/mage/fields.go create mode 100644 packetbeat/scripts/mage/package.go create mode 100644 packetbeat/scripts/mage/update.go create mode 100644 x-pack/packetbeat/Makefile create mode 100644 x-pack/packetbeat/magefile.go create mode 100644 x-pack/packetbeat/make.bat create mode 100644 x-pack/packetbeat/packetbeat.docker.yml create mode 100644 x-pack/packetbeat/packetbeat.reference.yml create mode 100644 x-pack/packetbeat/packetbeat.yml diff --git a/packetbeat/Makefile b/packetbeat/Makefile index 1ef455d71cf..36b5d6c29b0 100644 --- a/packetbeat/Makefile +++ b/packetbeat/Makefile @@ -1,15 +1,11 @@ -BEAT_NAME?=packetbeat -BEAT_TITLE?=Packetbeat -SYSTEM_TESTS?=true -TEST_ENVIRONMENT=false -ES_BEATS?=.. 
-EXCLUDE_COMMON_UPDATE_TARGET=true +# +# Includes +# +include ../dev-tools/make/oss.mk -include ${ES_BEATS}/libbeat/scripts/Makefile - -.PHONY: update -update: mage - mage update +# +# Targets +# .PHONY: benchmark benchmark: @@ -17,4 +13,4 @@ benchmark: .PHONY: create-tcp-protocol create-tcp-protocol: python-env - @${PYTHON_ENV}/bin/python ${ES_BEATS}/packetbeat/scripts/create_tcp_protocol.py + @${PYTHON_VE_DIR}/bin/python ${ES_BEATS}/packetbeat/scripts/create_tcp_protocol.py diff --git a/packetbeat/include/fields.go b/packetbeat/include/fields.go index 9392acdd493..61f21426117 100644 --- a/packetbeat/include/fields.go +++ b/packetbeat/include/fields.go @@ -24,7 +24,7 @@ import ( ) func init() { - if err := asset.SetFields("packetbeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("packetbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } diff --git a/packetbeat/magefile.go b/packetbeat/magefile.go index 144b8f32e4b..890a43f66c4 100644 --- a/packetbeat/magefile.go +++ b/packetbeat/magefile.go @@ -20,492 +20,30 @@ package main import ( - "context" - "fmt" - "log" - "regexp" - "strings" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" - "github.com/pkg/errors" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import packetbeat "github.com/elastic/beats/packetbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Packetbeat analyzes network traffic and sends the data to Elasticsearch." -} - -// Aliases provides compatibility with CI while we transition all Beats -// to having common testing targets. -var Aliases = map[string]interface{}{ - "goTestUnit": GoUnitTest, // dev-tools/jenkins_ci.ps1 uses this. -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - if dep, found := crossBuildDeps[mage.Platform.Name]; found { - mg.Deps(dep) - } - - params := mage.DefaultGolangCrossBuildArgs() - if flags, found := libpcapLDFLAGS[mage.Platform.Name]; found { - params.Env = map[string]string{ - "CGO_LDFLAGS": flags, - } - } - if flags, found := libpcapCFLAGS[mage.Platform.Name]; found { - params.Env["CGO_CFLAGS"] = flags - } - - return mage.GolangCrossBuild(params) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - mg.Deps(patchCGODirectives) - defer undoPatchCGODirectives() - - // These Windows builds write temporary .s and .o files into the packetbeat - // dir so they cannot be run in parallel. Changing to a different CWD does - // not change where the temp files get written so that cannot be used as a - // fix. 
- if err := mage.CrossBuild(mage.ForPlatforms("windows"), mage.Serially()); err != nil { - return err - } - - return mage.CrossBuild(mage.ForPlatforms("!windows")) -} - -// CrossBuildXPack cross-builds the beat with XPack for all target platforms. -func CrossBuildXPack() error { - mg.Deps(patchCGODirectives) - defer undoPatchCGODirectives() - - // These Windows builds write temporary .s and .o files into the packetbeat - // dir so they cannot be run in parallel. Changing to a different CWD does - // not change where the temp files get written so that cannot be used as a - // fix. - if err := mage.CrossBuildXPack(mage.ForPlatforms("windows"), mage.Serially()); err != nil { - return err - } - - return mage.CrossBuildXPack(mage.ForPlatforms("!windows")) -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. -func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatPackaging() - mage.PackageKibanaDashboardsFromBuildDir() - customizePackaging() - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildXPack, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update updates the generated files. -func Update() { - mg.SerialDeps(Fields, Dashboards, Config, includeList, fieldDocs) -} - -// Config generates the config files. -func Config() error { - return mage.Config(mage.AllConfigTypes, packetbeat.ConfigFileParams(), ".") -} - -func includeList() error { - return mage.GenerateIncludeListGo([]string{"protos/*"}, nil) -} - -// Fields generates fields.yml and fields.go files for the Beat. -func Fields() { - mg.Deps(libbeatAndPacketbeatCommonFieldsGo, protosFieldsGo) - mg.Deps(fieldsYML) -} - -// libbeatAndPacketbeatCommonFieldsGo generates a fields.go containing both -// libbeat and packetbeat's common fields. -func libbeatAndPacketbeatCommonFieldsGo() error { - if err := mage.GenerateFieldsYAML(); err != nil { - return err - } - return mage.GenerateAllInOneFieldsGo() -} - -// protosFieldsGo generates a fields.go for each protocol. -func protosFieldsGo() error { - return mage.GenerateModuleFieldsGo("protos") -} - -// fieldsYML generates the fields.yml file containing all fields. -func fieldsYML() error { - return mage.GenerateFieldsYAML("protos") -} - -func fieldDocs() error { - return mage.Docs.FieldDocs("fields.yml") -} - -// Dashboards collects all the dashboards and generates index patterns. -func Dashboards() error { - return mage.KibanaDashboards("protos") -} - -// Fmt formats source code and adds file headers. -func Fmt() { - mg.Deps(mage.Format) -} - -// Check runs fmt and update then returns an error if any modifications are found. -func Check() { - mg.SerialDeps(mage.Format, Update, mage.Check) -} - -// IntegTest executes integration tests (it uses Docker to run the tests). -func IntegTest() { - fmt.Println(">> integTest: Complete (no tests require the integ test environment)") -} - -// UnitTest executes the unit tests. 
-func UnitTest() { - mg.SerialDeps(GoUnitTest, PythonUnitTest) -} - -// GoUnitTest executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoUnitTest(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// PythonUnitTest executes the python system tests. -func PythonUnitTest() error { - mg.SerialDeps(Fields, mage.BuildSystemTestBinary) - return mage.PythonNoseTest(mage.DefaultPythonTestUnitArgs()) -} - -// ----------------------------------------------------------------------------- -// Customizations specific to Packetbeat. -// - Config file contains an OS specific device name (affects darwin, windows). -// - Must compile libpcap or winpcap during cross-compilation. -// - On Linux libpcap is statically linked. Darwin and Windows are dynamic. - -const ( - libpcapURL = "https://s3.amazonaws.com/beats-files/deps/libpcap-1.8.1.tar.gz" - libpcapSHA256 = "673dbc69fdc3f5a86fb5759ab19899039a8e5e6c631749e48dcd9c6f0c83541e" -) - -const ( - linuxPcapLDFLAGS = "-L/libpcap/libpcap-1.8.1 -lpcap" - linuxPcapCFLAGS = "-I /libpcap/libpcap-1.8.1" -) - -var libpcapLDFLAGS = map[string]string{ - "linux/386": linuxPcapLDFLAGS, - "linux/amd64": linuxPcapLDFLAGS, - "linux/arm64": linuxPcapLDFLAGS, - "linux/armv5": linuxPcapLDFLAGS, - "linux/armv6": linuxPcapLDFLAGS, - "linux/armv7": linuxPcapLDFLAGS, - "linux/mips": linuxPcapLDFLAGS, - "linux/mipsle": linuxPcapLDFLAGS, - "linux/mips64": linuxPcapLDFLAGS, - "linux/mips64le": linuxPcapLDFLAGS, - "linux/ppc64le": linuxPcapLDFLAGS, - "linux/s390x": linuxPcapLDFLAGS, - "darwin/amd64": "-lpcap", - "windows/amd64": "-L /libpcap/win/WpdPack/Lib/x64 -lwpcap", - "windows/386": "-L /libpcap/win/WpdPack/Lib -lwpcap", -} - -var libpcapCFLAGS = map[string]string{ - "linux/386": linuxPcapCFLAGS, - "linux/amd64": linuxPcapCFLAGS, - "linux/arm64": linuxPcapCFLAGS, - "linux/armv5": linuxPcapCFLAGS, - "linux/armv6": linuxPcapCFLAGS, - "linux/armv7": linuxPcapCFLAGS, - "linux/mips": linuxPcapCFLAGS, - "linux/mipsle": linuxPcapCFLAGS, - "linux/mips64": linuxPcapCFLAGS, - "linux/mips64le": linuxPcapCFLAGS, - "linux/ppc64le": linuxPcapCFLAGS, - "linux/s390x": linuxPcapCFLAGS, - "windows/amd64": "-I /libpcap/win/WpdPack/Include", - "windows/386": "-I /libpcap/win/WpdPack/Include", -} - -var crossBuildDeps = map[string]func() error{ - "linux/386": buildLibpcapLinux386, - "linux/amd64": buildLibpcapLinuxAMD64, - "linux/arm64": buildLibpcapLinuxARM64, - "linux/armv5": buildLibpcapLinuxARMv5, - "linux/armv6": buildLibpcapLinuxARMv6, - "linux/armv7": buildLibpcapLinuxARMv7, - "linux/mips": buildLibpcapLinuxMIPS, - "linux/mipsle": buildLibpcapLinuxMIPSLE, - "linux/mips64": buildLibpcapLinuxMIPS64, - "linux/mips64le": buildLibpcapLinuxMIPS64LE, - "linux/ppc64le": buildLibpcapLinuxPPC64LE, - "linux/s390x": buildLibpcapLinuxS390x, - "windows/amd64": installLibpcapWindowsAMD64, - "windows/386": installLibpcapWindows386, -} - -// buildLibpcapFromSource builds libpcap from source because the library needs -// to be compiled with -fPIC. -// See https://github.com/elastic/beats/pull/4217. 
-func buildLibpcapFromSource(params map[string]string) error { - tarFile, err := mage.DownloadFile(libpcapURL, "/libpcap") - if err != nil { - return errors.Wrap(err, "failed to download libpcap source") - } - - if err = mage.VerifySHA256(tarFile, libpcapSHA256); err != nil { - return err - } - - if err = mage.Extract(tarFile, "/libpcap"); err != nil { - return errors.Wrap(err, "failed to extract libpcap") - } - - var configureArgs []string - for k, v := range params { - if strings.HasPrefix(k, "-") { - delete(params, k) - configureArgs = append(configureArgs, k+"="+v) - } - } - - // Use sh -c here because sh.Run does not expose a way to change the CWD. - // This command only runs in Linux so this is fine. - return sh.RunWith(params, "sh", "-c", - "cd /libpcap/libpcap-1.8.1 && "+ - "./configure --enable-usb=no --enable-bluetooth=no --enable-dbus=no "+strings.Join(configureArgs, " ")+"&& "+ - "make") + packetbeat.SelectLogic = mage.OSSProject } -func buildLibpcapLinux386() error { - return buildLibpcapFromSource(map[string]string{ - "CFLAGS": "-m32", - "LDFLAGS": "-m32", - }) -} - -func buildLibpcapLinuxAMD64() error { - return buildLibpcapFromSource(map[string]string{}) -} - -func buildLibpcapLinuxARM64() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "aarch64-unknown-linux-gnu", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxARMv5() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "arm-linux-gnueabi", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxARMv6() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "arm-linux-gnueabi", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxARMv7() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "arm-linux-gnueabihf", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxMIPS() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "mips-unknown-linux-gnu", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxMIPSLE() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "mipsle-unknown-linux-gnu", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxMIPS64() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "mips64-unknown-linux-gnu", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxMIPS64LE() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "mips64le-unknown-linux-gnu", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxPPC64LE() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "powerpc64le-linux-gnu", - "--with-pcap": "linux", - }) -} - -func buildLibpcapLinuxS390x() error { - return buildLibpcapFromSource(map[string]string{ - "--host": "s390x-ibm-linux-gnu", - "--with-pcap": "linux", - }) -} - -func installLibpcapWindowsAMD64() error { - mg.SerialDeps(installWinpcap, generateWin64StaticWinpcap) - return nil -} - -func installLibpcapWindows386() error { - return installWinpcap() -} - -func installWinpcap() error { - log.Println("Install Winpcap") - const wpdpackURL = "https://www.winpcap.org/install/bin/WpdPack_4_1_2.zip" - - winpcapZip, err := mage.DownloadFile(wpdpackURL, "/") - if err != nil { - return err - } - - if err = mage.Extract(winpcapZip, "/libpcap/win"); err != nil { - return err - } - - return nil -} - -func generateWin64StaticWinpcap() error { - log.Println(">> Generating 64-bit winpcap static lib") - - // Notes: We are using absolute path to make sure the files - // are available for 
x-pack build. - // Ref: https://github.com/elastic/beats/issues/1259 - defer mage.DockerChown(mage.MustExpand("{{elastic_beats_dir}}/{{.BeatName}}/lib")) - return mage.RunCmds( - // Requires mingw-w64-tools. - []string{"gendef", mage.MustExpand("{{elastic_beats_dir}}/{{.BeatName}}/lib/windows-64/wpcap.dll")}, - []string{"mv", "wpcap.def", mage.MustExpand("{{ elastic_beats_dir}}/{{.BeatName}}/lib/windows-64/wpcap.def")}, - []string{"x86_64-w64-mingw32-dlltool", "--as-flags=--64", - "-m", "i386:x86-64", "-k", - "--output-lib", "/libpcap/win/WpdPack/Lib/x64/libwpcap.a", - "--input-def", mage.MustExpand("{{elastic_beats_dir}}/{{.BeatName}}/lib/windows-64/wpcap.def")}, - ) -} - -const pcapGoFile = "{{ elastic_beats_dir }}/vendor/github.com/tsg/gopacket/pcap/pcap.go" - -var cgoDirectiveRegex = regexp.MustCompile(`(?m)#cgo .*(?:LDFLAGS|CFLAGS).*$`) - -func patchCGODirectives() error { - // cgo directives do not support GOARM tags so we will clear the tags - // and set them via CGO_LDFLAGS and CGO_CFLAGS. - // Ref: https://github.com/golang/go/issues/7211 - f := mage.MustExpand(pcapGoFile) - log.Println("Patching", f, cgoDirectiveRegex.String()) - return mage.FindReplace(f, cgoDirectiveRegex, "") -} - -func undoPatchCGODirectives() error { - return sh.Run("git", "checkout", mage.MustExpand(pcapGoFile)) -} - -// customizePackaging modifies the device in the configuration files based on -// the target OS. -func customizePackaging() { - var ( - configYml = mage.PackageFile{ - Mode: 0600, - Source: "{{.PackageDir}}/{{.BeatName}}.yml", - Config: true, - Dep: func(spec mage.PackageSpec) error { - c := packetbeat.ConfigFileParams() - c.ExtraVars["GOOS"] = spec.OS - c.ExtraVars["GOARCH"] = spec.MustExpand("{{.GOARCH}}") - return mage.Config(mage.ShortConfigType, c, spec.MustExpand("{{.PackageDir}}")) - }, - } - referenceConfigYml = mage.PackageFile{ - Mode: 0644, - Source: "{{.PackageDir}}/{{.BeatName}}.reference.yml", - Dep: func(spec mage.PackageSpec) error { - c := packetbeat.ConfigFileParams() - c.ExtraVars["GOOS"] = spec.OS - c.ExtraVars["GOARCH"] = spec.MustExpand("{{.GOARCH}}") - return mage.Config(mage.ReferenceConfigType, c, spec.MustExpand("{{.PackageDir}}")) - }, - } - ) - - for _, args := range mage.Packages { - for _, pkgType := range args.Types { - switch pkgType { - case mage.TarGz, mage.Zip: - args.Spec.ReplaceFile("{{.BeatName}}.yml", configYml) - args.Spec.ReplaceFile("{{.BeatName}}.reference.yml", referenceConfigYml) - case mage.Deb, mage.RPM, mage.DMG: - args.Spec.ReplaceFile("/etc/{{.BeatName}}/{{.BeatName}}.yml", configYml) - args.Spec.ReplaceFile("/etc/{{.BeatName}}/{{.BeatName}}.reference.yml", referenceConfigYml) - case mage.Docker: - args.Spec.ExtraVar("linux_capabilities", "cap_net_raw,cap_net_admin=eip") - default: - panic(errors.Errorf("unhandled package type: %v", pkgType)) - } - - // Match the first package type then continue. - break - } - } -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(packetbeat.Update.All) } diff --git a/packetbeat/scripts/mage/build.go b/packetbeat/scripts/mage/build.go new file mode 100644 index 00000000000..970f02498d7 --- /dev/null +++ b/packetbeat/scripts/mage/build.go @@ -0,0 +1,339 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. 
licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "log" + "regexp" + "strings" + + "github.com/magefile/mage/mg" + "github.com/magefile/mage/sh" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" +) + +// Build builds the Beat binary. +func Build() error { + return mage.Build(mage.DefaultBuildArgs()) +} + +// GolangCrossBuild build the Beat binary inside of the golang-builder. +// Do not use directly, use crossBuild instead. +func GolangCrossBuild() error { + if dep, found := crossBuildDeps[mage.Platform.Name]; found { + mg.Deps(dep) + } + + params := mage.DefaultGolangCrossBuildArgs() + if flags, found := libpcapLDFLAGS[mage.Platform.Name]; found { + params.Env = map[string]string{ + "CGO_LDFLAGS": flags, + } + } + if flags, found := libpcapCFLAGS[mage.Platform.Name]; found { + params.Env["CGO_CFLAGS"] = flags + } + + return mage.GolangCrossBuild(params) +} + +// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). +func BuildGoDaemon() error { + return mage.BuildGoDaemon() +} + +// CrossBuild cross-builds the beat for all target platforms. +func CrossBuild() error { + mg.Deps(patchCGODirectives) + defer undoPatchCGODirectives() + + // These Windows builds write temporary .s and .o files into the packetbeat + // dir so they cannot be run in parallel. Changing to a different CWD does + // not change where the temp files get written so that cannot be used as a + // fix. + if err := mage.CrossBuild(mage.ForPlatforms("windows"), mage.Serially()); err != nil { + return err + } + + return mage.CrossBuild(mage.ForPlatforms("!windows")) +} + +// CrossBuildXPack cross-builds the beat with XPack for all target platforms. +func CrossBuildXPack() error { + mg.Deps(patchCGODirectives) + defer undoPatchCGODirectives() + + // These Windows builds write temporary .s and .o files into the packetbeat + // dir so they cannot be run in parallel. Changing to a different CWD does + // not change where the temp files get written so that cannot be used as a + // fix. + if err := mage.CrossBuildXPack(mage.ForPlatforms("windows"), mage.Serially()); err != nil { + return err + } + + return mage.CrossBuildXPack(mage.ForPlatforms("!windows")) +} + +// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. +func CrossBuildGoDaemon() error { + return mage.CrossBuildGoDaemon() +} + +// ----------------------------------------------------------------------------- +// Customizations specific to Packetbeat. +// - Config file contains an OS specific device name (affects darwin, windows). +// - Must compile libpcap or winpcap during cross-compilation. +// - On Linux libpcap is statically linked. Darwin and Windows are dynamic. 
+ +const ( + libpcapURL = "https://s3.amazonaws.com/beats-files/deps/libpcap-1.8.1.tar.gz" + libpcapSHA256 = "673dbc69fdc3f5a86fb5759ab19899039a8e5e6c631749e48dcd9c6f0c83541e" +) + +const ( + linuxPcapLDFLAGS = "-L/libpcap/libpcap-1.8.1 -lpcap" + linuxPcapCFLAGS = "-I /libpcap/libpcap-1.8.1" +) + +var libpcapLDFLAGS = map[string]string{ + "linux/386": linuxPcapLDFLAGS, + "linux/amd64": linuxPcapLDFLAGS, + "linux/arm64": linuxPcapLDFLAGS, + "linux/armv5": linuxPcapLDFLAGS, + "linux/armv6": linuxPcapLDFLAGS, + "linux/armv7": linuxPcapLDFLAGS, + "linux/mips": linuxPcapLDFLAGS, + "linux/mipsle": linuxPcapLDFLAGS, + "linux/mips64": linuxPcapLDFLAGS, + "linux/mips64le": linuxPcapLDFLAGS, + "linux/ppc64le": linuxPcapLDFLAGS, + "linux/s390x": linuxPcapLDFLAGS, + "darwin/amd64": "-lpcap", + "windows/amd64": "-L /libpcap/win/WpdPack/Lib/x64 -lwpcap", + "windows/386": "-L /libpcap/win/WpdPack/Lib -lwpcap", +} + +var libpcapCFLAGS = map[string]string{ + "linux/386": linuxPcapCFLAGS, + "linux/amd64": linuxPcapCFLAGS, + "linux/arm64": linuxPcapCFLAGS, + "linux/armv5": linuxPcapCFLAGS, + "linux/armv6": linuxPcapCFLAGS, + "linux/armv7": linuxPcapCFLAGS, + "linux/mips": linuxPcapCFLAGS, + "linux/mipsle": linuxPcapCFLAGS, + "linux/mips64": linuxPcapCFLAGS, + "linux/mips64le": linuxPcapCFLAGS, + "linux/ppc64le": linuxPcapCFLAGS, + "linux/s390x": linuxPcapCFLAGS, + "windows/amd64": "-I /libpcap/win/WpdPack/Include", + "windows/386": "-I /libpcap/win/WpdPack/Include", +} + +var crossBuildDeps = map[string]func() error{ + "linux/386": buildLibpcapLinux386, + "linux/amd64": buildLibpcapLinuxAMD64, + "linux/arm64": buildLibpcapLinuxARM64, + "linux/armv5": buildLibpcapLinuxARMv5, + "linux/armv6": buildLibpcapLinuxARMv6, + "linux/armv7": buildLibpcapLinuxARMv7, + "linux/mips": buildLibpcapLinuxMIPS, + "linux/mipsle": buildLibpcapLinuxMIPSLE, + "linux/mips64": buildLibpcapLinuxMIPS64, + "linux/mips64le": buildLibpcapLinuxMIPS64LE, + "linux/ppc64le": buildLibpcapLinuxPPC64LE, + "linux/s390x": buildLibpcapLinuxS390x, + "windows/amd64": installLibpcapWindowsAMD64, + "windows/386": installLibpcapWindows386, +} + +// buildLibpcapFromSource builds libpcap from source because the library needs +// to be compiled with -fPIC. +// See https://github.com/elastic/beats/pull/4217. +func buildLibpcapFromSource(params map[string]string) error { + tarFile, err := mage.DownloadFile(libpcapURL, "/libpcap") + if err != nil { + return errors.Wrap(err, "failed to download libpcap source") + } + + if err = mage.VerifySHA256(tarFile, libpcapSHA256); err != nil { + return err + } + + if err = mage.Extract(tarFile, "/libpcap"); err != nil { + return errors.Wrap(err, "failed to extract libpcap") + } + + var configureArgs []string + for k, v := range params { + if strings.HasPrefix(k, "-") { + delete(params, k) + configureArgs = append(configureArgs, k+"="+v) + } + } + + // Use sh -c here because sh.Run does not expose a way to change the CWD. + // This command only runs in Linux so this is fine. 
+ return sh.RunWith(params, "sh", "-c", + "cd /libpcap/libpcap-1.8.1 && "+ + "./configure --enable-usb=no --enable-bluetooth=no --enable-dbus=no "+strings.Join(configureArgs, " ")+"&& "+ + "make") +} + +func buildLibpcapLinux386() error { + return buildLibpcapFromSource(map[string]string{ + "CFLAGS": "-m32", + "LDFLAGS": "-m32", + }) +} + +func buildLibpcapLinuxAMD64() error { + return buildLibpcapFromSource(map[string]string{}) +} + +func buildLibpcapLinuxARM64() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "aarch64-unknown-linux-gnu", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxARMv5() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "arm-linux-gnueabi", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxARMv6() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "arm-linux-gnueabi", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxARMv7() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "arm-linux-gnueabihf", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxMIPS() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "mips-unknown-linux-gnu", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxMIPSLE() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "mipsle-unknown-linux-gnu", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxMIPS64() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "mips64-unknown-linux-gnu", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxMIPS64LE() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "mips64le-unknown-linux-gnu", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxPPC64LE() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "powerpc64le-linux-gnu", + "--with-pcap": "linux", + }) +} + +func buildLibpcapLinuxS390x() error { + return buildLibpcapFromSource(map[string]string{ + "--host": "s390x-ibm-linux-gnu", + "--with-pcap": "linux", + }) +} + +func installLibpcapWindowsAMD64() error { + mg.SerialDeps(installWinpcap, generateWin64StaticWinpcap) + return nil +} + +func installLibpcapWindows386() error { + return installWinpcap() +} + +func installWinpcap() error { + log.Println("Install Winpcap") + const wpdpackURL = "https://www.winpcap.org/install/bin/WpdPack_4_1_2.zip" + + winpcapZip, err := mage.DownloadFile(wpdpackURL, "/") + if err != nil { + return err + } + + if err = mage.Extract(winpcapZip, "/libpcap/win"); err != nil { + return err + } + + return nil +} + +func generateWin64StaticWinpcap() error { + log.Println(">> Generating 64-bit winpcap static lib") + + // Notes: We are using absolute path to make sure the files + // are available for x-pack build. + // Ref: https://github.com/elastic/beats/issues/1259 + defer mage.DockerChown(mage.MustExpand("{{elastic_beats_dir}}/{{.BeatName}}/lib")) + return mage.RunCmds( + // Requires mingw-w64-tools. 
+ []string{"gendef", mage.MustExpand("{{elastic_beats_dir}}/{{.BeatName}}/lib/windows-64/wpcap.dll")}, + []string{"mv", "wpcap.def", mage.MustExpand("{{ elastic_beats_dir}}/{{.BeatName}}/lib/windows-64/wpcap.def")}, + []string{"x86_64-w64-mingw32-dlltool", "--as-flags=--64", + "-m", "i386:x86-64", "-k", + "--output-lib", "/libpcap/win/WpdPack/Lib/x64/libwpcap.a", + "--input-def", mage.MustExpand("{{elastic_beats_dir}}/{{.BeatName}}/lib/windows-64/wpcap.def")}, + ) +} + +const pcapGoFile = "{{ elastic_beats_dir }}/vendor/github.com/tsg/gopacket/pcap/pcap.go" + +var cgoDirectiveRegex = regexp.MustCompile(`(?m)#cgo .*(?:LDFLAGS|CFLAGS).*$`) + +func patchCGODirectives() error { + // cgo directives do not support GOARM tags so we will clear the tags + // and set them via CGO_LDFLAGS and CGO_CFLAGS. + // Ref: https://github.com/golang/go/issues/7211 + f := mage.MustExpand(pcapGoFile) + log.Println("Patching", f, cgoDirectiveRegex.String()) + return mage.FindReplace(f, cgoDirectiveRegex, "") +} + +func undoPatchCGODirectives() error { + return sh.Run("git", "checkout", mage.MustExpand(pcapGoFile)) +} diff --git a/packetbeat/scripts/mage/config.go b/packetbeat/scripts/mage/config.go index b0aab867465..a5f98008ca0 100644 --- a/packetbeat/scripts/mage/config.go +++ b/packetbeat/scripts/mage/config.go @@ -26,37 +26,28 @@ const ( configTemplateGlob = "protos/*/_meta/config*.yml.tmpl" ) -var defaultDevice = map[string]string{ - "darwin": "en0", - "linux": "any", - "windows": "0", -} - -func device(goos string) string { - dev, found := defaultDevice[goos] - if found { - return dev - } - return "any" +// Config generates the config files. +func config() error { + return mage.Config(mage.AllConfigTypes, configFileParams(), ".") } // ConfigFileParams returns the default ConfigFileParams for generating // packetbeat*.yml files. -func ConfigFileParams() mage.ConfigFileParams { +func configFileParams() mage.ConfigFileParams { return mage.ConfigFileParams{ ShortParts: []string{ mage.OSSBeatDir("_meta/beat.yml"), - configTemplateGlob, + mage.OSSBeatDir(configTemplateGlob), mage.LibbeatDir("_meta/config.yml"), }, ReferenceParts: []string{ mage.OSSBeatDir("_meta/beat.reference.yml"), - configTemplateGlob, + mage.OSSBeatDir(configTemplateGlob), mage.LibbeatDir("_meta/config.reference.yml"), }, DockerParts: []string{ mage.OSSBeatDir("_meta/beat.docker.yml"), - configTemplateGlob, + mage.OSSBeatDir(configTemplateGlob), mage.LibbeatDir("_meta/config.docker.yml"), }, ExtraVars: map[string]interface{}{ @@ -64,3 +55,17 @@ func ConfigFileParams() mage.ConfigFileParams { }, } } + +var defaultDevice = map[string]string{ + "darwin": "en0", + "linux": "any", + "windows": "0", +} + +func device(goos string) string { + dev, found := defaultDevice[goos] + if found { + return dev + } + return "any" +} diff --git a/packetbeat/scripts/mage/fields.go b/packetbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..94af6c0ee04 --- /dev/null +++ b/packetbeat/scripts/mage/fields.go @@ -0,0 +1,86 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + "go.uber.org/multierr" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch SelectLogic { + case mage.OSSProject: + return multierr.Combine( + b.commonFieldsGo(), + b.protosFieldsGo(), + ) + case mage.XPackProject: + return nil + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (b fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject, mage.XPackProject: + modules = append(modules, mage.OSSBeatDir("protos")) + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML, + mage.OSSBeatDir("protos"), + ) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} + +// protosFieldsGo generates a fields.go for each protocol. +func (fieldsBuilder) protosFieldsGo() error { + return mage.GenerateModuleFieldsGo(mage.OSSBeatDir("protos")) +} diff --git a/packetbeat/scripts/mage/package.go b/packetbeat/scripts/mage/package.go new file mode 100644 index 00000000000..45c5026f683 --- /dev/null +++ b/packetbeat/scripts/mage/package.go @@ -0,0 +1,104 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "fmt" + "time" + + "github.com/magefile/mage/mg" + "github.com/pkg/errors" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" +) + +func init() { + mage.BeatDescription = "Packetbeat analyzes network traffic and sends the data to Elasticsearch." +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. 
+// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() + + switch SelectLogic { + case mage.OSSProject: + mage.UseElasticBeatOSSPackaging() + case mage.XPackProject: + mage.UseElasticBeatXPackPackaging() + } + mage.PackageKibanaDashboardsFromBuildDir() + customizePackaging() + + mg.Deps(Update.All) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} + +// customizePackaging modifies the device in the configuration files based on +// the target OS. +func customizePackaging() { + var ( + configYml = mage.PackageFile{ + Mode: 0600, + Source: "{{.PackageDir}}/{{.BeatName}}.yml", + Config: true, + Dep: func(spec mage.PackageSpec) error { + c := configFileParams() + c.ExtraVars["GOOS"] = spec.OS + c.ExtraVars["GOARCH"] = spec.MustExpand("{{.GOARCH}}") + return mage.Config(mage.ShortConfigType, c, spec.MustExpand("{{.PackageDir}}")) + }, + } + referenceConfigYml = mage.PackageFile{ + Mode: 0644, + Source: "{{.PackageDir}}/{{.BeatName}}.reference.yml", + Dep: func(spec mage.PackageSpec) error { + c := configFileParams() + c.ExtraVars["GOOS"] = spec.OS + c.ExtraVars["GOARCH"] = spec.MustExpand("{{.GOARCH}}") + return mage.Config(mage.ReferenceConfigType, c, spec.MustExpand("{{.PackageDir}}")) + }, + } + ) + + for _, args := range mage.Packages { + for _, pkgType := range args.Types { + switch pkgType { + case mage.TarGz, mage.Zip: + args.Spec.ReplaceFile("{{.BeatName}}.yml", configYml) + args.Spec.ReplaceFile("{{.BeatName}}.reference.yml", referenceConfigYml) + case mage.Deb, mage.RPM, mage.DMG: + args.Spec.ReplaceFile("/etc/{{.BeatName}}/{{.BeatName}}.yml", configYml) + args.Spec.ReplaceFile("/etc/{{.BeatName}}/{{.BeatName}}.reference.yml", referenceConfigYml) + case mage.Docker: + args.Spec.ExtraVar("linux_capabilities", "cap_net_raw,cap_net_admin=eip") + default: + panic(errors.Errorf("unhandled package type: %v", pkgType)) + } + + // Match the first package type then continue. + break + } + } +} diff --git a/packetbeat/scripts/mage/update.go b/packetbeat/scripts/mage/update.go new file mode 100644 index 00000000000..b8a1bd3a749 --- /dev/null +++ b/packetbeat/scripts/mage/update.go @@ -0,0 +1,85 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +package mage + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + "github.com/elastic/beats/dev-tools/mage/target/unittest" +) + +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs) + + unittest.RegisterPythonTestDeps(Update.Fields) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields, Update.Dashboards, Update.Config, + Update.Includes, Update.FieldDocs) +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. +func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + return mage.KibanaDashboards() +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// FieldDocs updates the field documentation. +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} + +// Includes updates include/list.go. +func (Update) Includes() error { + if SelectLogic != mage.OSSProject { + // Only OSS has protocols. + return nil + } + mg.Deps(fb.FieldsGo) + return mage.GenerateIncludeListGo([]string{"protos/*"}, nil) +} diff --git a/x-pack/packetbeat/Makefile b/x-pack/packetbeat/Makefile new file mode 100644 index 00000000000..7427a5c672b --- /dev/null +++ b/x-pack/packetbeat/Makefile @@ -0,0 +1,4 @@ +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/packetbeat/magefile.go b/x-pack/packetbeat/magefile.go new file mode 100644 index 00000000000..c6afb4dd14c --- /dev/null +++ b/x-pack/packetbeat/magefile.go @@ -0,0 +1,34 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// +build mage + +package main + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + packetbeat "github.com/elastic/beats/packetbeat/scripts/mage" +) + +func init() { + packetbeat.SelectLogic = mage.XPackProject +} + +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. 
+func Update() { mg.Deps(packetbeat.Update.All) } diff --git a/x-pack/packetbeat/make.bat b/x-pack/packetbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/x-pack/packetbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. + +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* diff --git a/x-pack/packetbeat/packetbeat.docker.yml b/x-pack/packetbeat/packetbeat.docker.yml new file mode 100644 index 00000000000..b475f516f49 --- /dev/null +++ b/x-pack/packetbeat/packetbeat.docker.yml @@ -0,0 +1,42 @@ +packetbeat.interfaces.device: any + +packetbeat.flows: + timeout: 30s + period: 10s + +packetbeat.protocols.dns: + ports: [53] + include_authorities: true + include_additionals: true + +packetbeat.protocols.http: + ports: [80, 5601, 9200, 8080, 8081, 5000, 8002] + +packetbeat.protocols.memcache: + ports: [11211] + +packetbeat.protocols.mysql: + ports: [3306] + +packetbeat.protocols.pgsql: + ports: [5432] + +packetbeat.protocols.redis: + ports: [6379] + +packetbeat.protocols.thrift: + ports: [9090] + +packetbeat.protocols.mongodb: + ports: [27017] + +packetbeat.protocols.cassandra: + ports: [9042] + +processors: +- add_cloud_metadata: ~ + +output.elasticsearch: + hosts: '${ELASTICSEARCH_HOSTS:elasticsearch:9200}' + username: '${ELASTICSEARCH_USERNAME:}' + password: '${ELASTICSEARCH_PASSWORD:}' diff --git a/x-pack/packetbeat/packetbeat.reference.yml b/x-pack/packetbeat/packetbeat.reference.yml new file mode 100644 index 00000000000..ad1b43fa4b6 --- /dev/null +++ b/x-pack/packetbeat/packetbeat.reference.yml @@ -0,0 +1,1630 @@ +###################### Packetbeat Configuration Example ####################### + +# This file is a full configuration example documenting all non-deprecated +# options in comments. For a shorter configuration example, that contains only +# the most common options, please see packetbeat.yml in the same directory. +# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/packetbeat/index.html + +#============================== Network device ================================ + +# Select the network interface to sniff the data. You can use the "any" +# keyword to sniff on all connected interfaces. +packetbeat.interfaces.device: any + +# Packetbeat supports three sniffer types: +# * pcap, which uses the libpcap library and works on most platforms, but it's +# not the fastest option. +# * af_packet, which uses memory-mapped sniffing. This option is faster than +# libpcap and doesn't require a kernel module, but it's Linux-specific. +#packetbeat.interfaces.type: pcap + +# The maximum size of the packets to capture. The default is 65535, which is +# large enough for almost all networks and interface types. If you sniff on a +# physical network interface, the optimal setting is the MTU size. On virtual +# interfaces, however, it's safer to accept the default value. +#packetbeat.interfaces.snaplen: 65535 + +# The maximum size of the shared memory buffer to use between the kernel and +# user space. A bigger buffer usually results in lower CPU usage, but consumes +# more memory. This setting is only available for the af_packet sniffer type. +# The default is 30 MB. 
+#packetbeat.interfaces.buffer_size_mb: 30
+
+# Packetbeat automatically generates a BPF filter for capturing only the traffic on
+# ports where it expects to find known protocols. Use this setting to tell
+# Packetbeat to generate a BPF filter that accepts VLAN tags.
+#packetbeat.interfaces.with_vlans: true
+
+# Use this setting to override the automatically generated BPF filter.
+#packetbeat.interfaces.bpf_filter:
+
+#================================== Flows =====================================
+
+packetbeat.flows:
+  # Enable Network flows. Default: true
+  #enabled: true
+
+  # Set network flow timeout. Flow is killed if no packet is received before being
+  # timed out.
+  timeout: 30s
+
+  # Configure reporting period. If set to -1, only killed flows will be reported
+  period: 10s
+
+#========================== Transaction protocols =============================
+
+packetbeat.protocols:
+- type: icmp
+  # Enable ICMPv4 and ICMPv6 monitoring. Default: true
+  #enabled: true
+
+- type: amqp
+  # Enable AMQP monitoring. Default: true
+  #enabled: true
+
+  # Configure the ports where to listen for AMQP traffic. You can disable
+  # the AMQP protocol by commenting out the list of ports.
+  ports: [5672]
+  # Truncate messages that are published and avoid huge messages being
+  # indexed.
+  # Default: 1000
+  #max_body_length: 1000
+
+  # Hide the header fields in header frames.
+  # Default: false
+  #parse_headers: false
+
+  # Hide the additional arguments of method frames.
+  # Default: false
+  #parse_arguments: false
+
+  # Hide all methods relative to connection negotiation between server and
+  # client.
+  # Default: true
+  #hide_connection_information: true
+
+  # If this option is enabled, the raw message of the request (`request` field)
+  # is sent to Elasticsearch. The default is false.
+  #send_request: false
+
+  # If this option is enabled, the raw message of the response (`response`
+  # field) is sent to Elasticsearch. The default is false.
+  #send_response: false
+
+  # Transaction timeout. Expired transactions will no longer be correlated to
+  # incoming responses, but sent to Elasticsearch immediately.
+  #transaction_timeout: 10s
+
+- type: cassandra
+  # Cassandra port for traffic monitoring.
+  ports: [9042]
+
+  # If this option is enabled, the raw message of the request (`cassandra_request` field)
+  # is included in published events. The default is true.
+  #send_request: true
+
+  # If this option is enabled, the raw message of the response (`cassandra_request.request_headers` field)
+  # is included in published events. The default is true. Enable `send_request` before enabling this option.
+  #send_request_header: true
+
+  # If this option is enabled, the raw message of the response (`cassandra_response` field)
+  # is included in published events. The default is true.
+  #send_response: true
+
+  # If this option is enabled, the raw message of the response (`cassandra_response.response_headers` field)
+  # is included in published events. The default is true. Enable `send_response` before enabling this option.
+  #send_response_header: true
+
+  # Configures the compression algorithm (by name) used to uncompress compressed frames. Currently only `snappy` can be configured.
+  # By default no compressor is configured.
+  #compressor: "snappy"
+
+  # This option indicates which operators will be ignored.
+  #ignored_ops: ["SUPPORTED","OPTIONS"]
+
+- type: dhcpv4
+  # Configure the DHCP for IPv4 ports.
+  ports: [67, 68]
+
+- type: dns
+  # Enable DNS monitoring.
Default: true + #enabled: true + + # Configure the ports where to listen for DNS traffic. You can disable + # the DNS protocol by commenting out the list of ports. + ports: [53] + + # include_authorities controls whether or not the dns.authorities field + # (authority resource records) is added to messages. + # Default: false + include_authorities: true + # include_additionals controls whether or not the dns.additionals field + # (additional resource records) is added to messages. + # Default: false + include_additionals: true + + # send_request and send_response control whether or not the stringified DNS + # request and response message are added to the result. + # Nearly all data about the request/response is available in the dns.* + # fields, but this can be useful if you need visibility specifically + # into the request or the response. + # Default: false + # send_request: true + # send_response: true + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: http + # Enable HTTP monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for HTTP traffic. You can disable + # the HTTP protocol by commenting out the list of ports. + ports: [80, 8080, 8000, 5000, 8002] + + # Uncomment the following to hide certain parameters in URL or forms attached + # to HTTP requests. The names of the parameters are case insensitive. + # The value of the parameters will be replaced with the 'xxxxx' string. + # This is generally useful for avoiding storing user passwords or other + # sensitive information. + # Only query parameters and top level form parameters are replaced. + # hide_keywords: ['pass', 'password', 'passwd'] + + # A list of header names to capture and send to Elasticsearch. These headers + # are placed under the `headers` dictionary in the resulting JSON. + #send_headers: false + + # Instead of sending a white list of headers to Elasticsearch, you can send + # all headers by setting this option to true. The default is false. + #send_all_headers: false + + # The list of content types for which Packetbeat includes the full HTTP + # payload. If the request's or response's Content-Type matches any on this + # list, the full body will be included under the request or response field. + #include_body_for: [] + + # The list of content types for which Packetbeat includes the full HTTP + # request payload. + #include_request_body_for: [] + + # The list of content types for which Packetbeat includes the full HTTP + # response payload. + #include_response_body_for: [] + + # Whether the body of a request must be decoded when a content-encoding + # or transfer-encoding has been applied. + #decode_body: true + + # If the Cookie or Set-Cookie headers are sent, this option controls whether + # they are split into individual values. + #split_cookie: false + + # The header field to extract the real IP from. This setting is useful when + # you want to capture traffic behind a reverse proxy, but you want to get the + # geo-location information. + #real_ip_header: + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. 
Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + + # Maximum message size. If an HTTP message is larger than this, it will + # be trimmed to this size. Default is 10 MB. + #max_message_size: 10485760 + +- type: memcache + # Enable memcache monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for memcache traffic. You can disable + # the Memcache protocol by commenting out the list of ports. + ports: [11211] + + # Uncomment the parseunknown option to force the memcache text protocol parser + # to accept unknown commands. + # Note: All unknown commands MUST not contain any data parts! + # Default: false + # parseunknown: true + + # Update the maxvalue option to store the values - base64 encoded - in the + # json output. + # possible values: + # maxvalue: -1 # store all values (text based protocol multi-get) + # maxvalue: 0 # store no values at all + # maxvalue: N # store up to N values + # Default: 0 + # maxvalues: -1 + + # Use maxbytespervalue to limit the number of bytes to be copied per value element. + # Note: Values will be base64 encoded, so actual size in json document + # will be 4 times maxbytespervalue. + # Default: unlimited + # maxbytespervalue: 100 + + # UDP transaction timeout in milliseconds. + # Note: Quiet messages in UDP binary protocol will get response only in error case. + # The memcached analyzer will wait for udptransactiontimeout milliseconds + # before publishing quiet messages. Non quiet messages or quiet requests with + # error response will not have to wait for the timeout. + # Default: 200 + # udptransactiontimeout: 1000 + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: mysql + # Enable mysql monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for MySQL traffic. You can disable + # the MySQL protocol by commenting out the list of ports. + ports: [3306] + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: pgsql + # Enable pgsql monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for Pgsql traffic. You can disable + # the Pgsql protocol by commenting out the list of ports. + ports: [5432] + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. 
Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: redis + # Enable redis monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for Redis traffic. You can disable + # the Redis protocol by commenting out the list of ports. + ports: [6379] + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: thrift + # Enable thrift monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for Thrift-RPC traffic. You can disable + # the Thrift-RPC protocol by commenting out the list of ports. + ports: [9090] + + # The Thrift transport type. Currently this option accepts the values socket + # for TSocket, which is the default Thrift transport, and framed for the + # TFramed Thrift transport. The default is socket. + #transport_type: socket + + # The Thrift protocol type. Currently the only accepted value is binary for + # the TBinary protocol, which is the default Thrift protocol. + #protocol_type: binary + + # The Thrift interface description language (IDL) files for the service that + # Packetbeat is monitoring. Providing the IDL enables Packetbeat to include + # parameter and exception names. + #idl_files: [] + + # The maximum length for strings in parameters or return values. If a string + # is longer than this value, the string is automatically truncated to this + # length. + #string_max_size: 200 + + # The maximum number of elements in a Thrift list, set, map, or structure. + #collection_max_size: 15 + + # If this option is set to false, Packetbeat decodes the method name from the + # reply and simply skips the rest of the response message. + #capture_reply: true + + # If this option is set to true, Packetbeat replaces all strings found in + # method parameters, return codes, or exception structures with the "*" + # string. + #obfuscate_strings: false + + # The maximum number of fields that a structure can have before Packetbeat + # ignores the whole transaction. + #drop_after_n_struct_fields: 500 + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: mongodb + # Enable mongodb monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for MongoDB traffic. You can disable + # the MongoDB protocol by commenting out the list of ports. + ports: [27017] + + + # The maximum number of documents from the response to index in the `response` + # field. The default is 10. + #max_docs: 10 + + # The maximum number of characters in a single document indexed in the + # `response` field. The default is 5000. 
You can set this to 0 to index an + # unlimited number of characters per document. + #max_doc_length: 5000 + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: nfs + # Enable NFS monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for NFS traffic. You can disable + # the NFS protocol by commenting out the list of ports. + ports: [2049] + + # If this option is enabled, the raw message of the request (`request` field) + # is sent to Elasticsearch. The default is false. + #send_request: false + + # If this option is enabled, the raw message of the response (`response` + # field) is sent to Elasticsearch. The default is false. + #send_response: false + + # Transaction timeout. Expired transactions will no longer be correlated to + # incoming responses, but sent to Elasticsearch immediately. + #transaction_timeout: 10s + +- type: tls + # Enable TLS monitoring. Default: true + #enabled: true + + # Configure the ports where to listen for TLS traffic. You can disable + # the TLS protocol by commenting out the list of ports. + ports: [443] + + # List of hash algorithms to use to calculate certificates' fingerprints. + # Valid values are `sha1`, `sha256` and `md5`. + #fingerprints: [sha1] + + # If this option is enabled, the client and server certificates and + # certificate chains are sent to Elasticsearch. The default is true. + #send_certificates: true + + # If this option is enabled, the raw certificates will be stored + # in PEM format under the `raw` key. The default is false. + #include_raw_certificates: false + +#=========================== Monitored processes ============================== + +# Configure the processes to be monitored and how to find them. If a process is +# monitored then Packetbeat attempts to use it's name to fill in the `proc` and +# `client_proc` fields. +# The processes can be found by searching their command line by a given string. +# +# Process matching is optional and can be enabled by uncommenting the following +# lines. +# +#packetbeat.procs: +# enabled: false +# monitored: +# - process: mysqld +# cmdline_grep: mysqld +# +# - process: pgsql +# cmdline_grep: postgres +# +# - process: nginx +# cmdline_grep: nginx +# +# - process: app +# cmdline_grep: gunicorn + +# Uncomment the following if you want to ignore transactions created +# by the server on which the shipper is installed. This option is useful +# to remove duplicates if shippers are installed on multiple servers. +#packetbeat.ignore_outgoing: true + +#================================ General ====================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +# If this options is not defined, the hostname is used. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. Tags make it easy to group servers by different +# logical properties. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. 
Fields can be scalar values, arrays, dictionaries, or any nested +# combination of these. +#fields: +# env: staging + +# If this option is set to true, the custom fields are stored as top-level +# fields in the output document instead of being grouped under a fields +# sub-dictionary. Default is false. +#fields_under_root: false + +# Internal queue configuration for buffering events to be published. +#queue: + # Queue type by name (default 'mem') + # The memory queue will present all available events (up to the outputs + # bulk_max_size) to the output, the moment the output is ready to server + # another batch of events. + #mem: + # Max number of events the queue can buffer. + #events: 4096 + + # Hints the minimum number of events stored in the queue, + # before providing a batch of events to the outputs. + # The default value is set to 2048. + # A value of 0 ensures events are immediately available + # to be sent to the outputs. + #flush.min_events: 2048 + + # Maximum duration after which events are available to the outputs, + # if the number of events stored in the queue is < min_flush_events. + #flush.timeout: 1s + + # The spool queue will store events in a local spool file, before + # forwarding the events to the outputs. + # + # Beta: spooling to disk is currently a beta feature. Use with care. + # + # The spool file is a circular buffer, which blocks once the file/buffer is full. + # Events are put into a write buffer and flushed once the write buffer + # is full or the flush_timeout is triggered. + # Once ACKed by the output, events are removed immediately from the queue, + # making space for new events to be persisted. + #spool: + # The file namespace configures the file path and the file creation settings. + # Once the file exists, the `size`, `page_size` and `prealloc` settings + # will have no more effect. + #file: + # Location of spool file. The default value is ${path.data}/spool.dat. + #path: "${path.data}/spool.dat" + + # Configure file permissions if file is created. The default value is 0600. + #permissions: 0600 + + # File size hint. The spool blocks, once this limit is reached. The default value is 100 MiB. + #size: 100MiB + + # The files page size. A file is split into multiple pages of the same size. The default value is 4KiB. + #page_size: 4KiB + + # If prealloc is set, the required space for the file is reserved using + # truncate. The default value is true. + #prealloc: true + + # Spool writer settings + # Events are serialized into a write buffer. The write buffer is flushed if: + # - The buffer limit has been reached. + # - The configured limit of buffered events is reached. + # - The flush timeout is triggered. + #write: + # Sets the write buffer size. + #buffer_size: 1MiB + + # Maximum duration after which events are flushed, if the write buffer + # is not full yet. The default value is 1s. + #flush.timeout: 1s + + # Number of maximum buffered events. The write buffer is flushed once the + # limit is reached. + #flush.events: 16384 + + # Configure the on-disk event encoding. The encoding can be changed + # between restarts. + # Valid encodings are: json, ubjson, and cbor. + #codec: cbor + #read: + # Reader flush timeout, waiting for more events to become available, so + # to fill a complete batch, as required by the outputs. + # If flush_timeout is 0, all available events are forwarded to the + # outputs immediately. + # The default value is 0s. + #flush.timeout: 0s + +# Sets the maximum number of CPUs that can be executing simultaneously. 
The +# default is the number of logical CPUs available in the system. +#max_procs: + +#================================ Processors =================================== + +# Processors are used to reduce the number of fields in the exported event or to +# enhance the event with external metadata. This section defines a list of +# processors that are applied one by one and the first one receives the initial +# event: +# +# event -> filter1 -> event1 -> filter2 ->event2 ... +# +# The supported processors are drop_fields, drop_event, include_fields, +# decode_json_fields, and add_cloud_metadata. +# +# For example, you can use the following processors to keep the fields that +# contain CPU load percentages, but remove the fields that contain CPU ticks +# values: +# +#processors: +#- include_fields: +# fields: ["cpu"] +#- drop_fields: +# fields: ["cpu.user", "cpu.system"] +# +# The following example drops the events that have the HTTP response code 200: +# +#processors: +#- drop_event: +# when: +# equals: +# http.code: 200 +# +# The following example renames the field a to b: +# +#processors: +#- rename: +# fields: +# - from: "a" +# to: "b" +# +# The following example tokenizes the string into fields: +# +#processors: +#- dissect: +# tokenizer: "%{key1} - %{key2}" +# field: "message" +# target_prefix: "dissect" +# +# The following example enriches each event with metadata from the cloud +# provider about the host machine. It works on EC2, GCE, DigitalOcean, +# Tencent Cloud, and Alibaba Cloud. +# +#processors: +#- add_cloud_metadata: ~ +# +# The following example enriches each event with the machine's local time zone +# offset from UTC. +# +#processors: +#- add_locale: +# format: offset +# +# The following example enriches each event with docker metadata, it matches +# given fields to an existing container id and adds info from that container: +# +#processors: +#- add_docker_metadata: +# host: "unix:///var/run/docker.sock" +# match_fields: ["system.process.cgroup.id"] +# match_pids: ["process.pid", "process.ppid"] +# match_source: true +# match_source_index: 4 +# match_short_id: false +# cleanup_timeout: 60 +# labels.dedot: false +# # To connect to Docker over TLS you must specify a client and CA certificate. +# #ssl: +# # certificate_authority: "/etc/pki/root/ca.pem" +# # certificate: "/etc/pki/client/cert.pem" +# # key: "/etc/pki/client/cert.key" +# +# The following example enriches each event with docker metadata, it matches +# container id from log path available in `source` field (by default it expects +# it to be /var/lib/docker/containers/*/*.log). +# +#processors: +#- add_docker_metadata: ~ +# +# The following example enriches each event with host metadata. +# +#processors: +#- add_host_metadata: +# netinfo.enabled: false +# +# The following example enriches each event with process metadata using +# process IDs included in the event. +# +#processors: +#- add_process_metadata: +# match_pids: ["system.process.ppid"] +# target: system.process.parent +# +# The following example decodes fields containing JSON strings +# and replaces the strings with valid JSON objects. +# +#processors: +#- decode_json_fields: +# fields: ["field1", "field2", ...] +# process_array: false +# max_depth: 1 +# target: "" +# overwrite_keys: false + +#============================= Elastic Cloud ================================== + +# These settings simplify using packetbeat with the Elastic Cloud (https://cloud.elastic.co/). 
+
+# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
+# `setup.kibana.host` options.
+# You can find the `cloud.id` in the Elastic Cloud web UI.
+#cloud.id:
+
+# The cloud.auth setting overwrites the `output.elasticsearch.username` and
+# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
+#cloud.auth:
+
+#================================ Outputs ======================================
+
+# Configure what output to use when sending the data collected by the beat.
+
+#-------------------------- Elasticsearch output -------------------------------
+output.elasticsearch:
+  # Boolean flag to enable or disable the output module.
+  #enabled: true
+
+  # Array of hosts to connect to.
+  # Scheme and port can be left out and will be set to the default (http and 9200)
+  # In case you specify an additional path, the scheme is required: http://localhost:9200/path
+  # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200
+  hosts: ["localhost:9200"]
+
+  # Enable ILM (beta) to use index lifecycle management instead of daily indices.
+  #ilm.enabled: false
+  #ilm.rollover_alias: "packetbeat"
+  #ilm.pattern: "{now/d}-000001"
+
+  # Set gzip compression level.
+  #compression_level: 0
+
+  # Configure escaping html symbols in strings.
+  #escape_html: true
+
+  # Optional protocol and basic auth credentials.
+  #protocol: "https"
+  #username: "elastic"
+  #password: "changeme"
+
+  # Dictionary of HTTP parameters to pass within the url with index operations.
+  #parameters:
+    #param1: value1
+    #param2: value2
+
+  # Number of workers per Elasticsearch host.
+  #worker: 1
+
+  # Optional index name. The default is "packetbeat" plus date
+  # and generates [packetbeat-]YYYY.MM.DD keys.
+  # In case you modify this pattern you must update setup.template.name and setup.template.pattern accordingly.
+  #index: "packetbeat-%{[beat.version]}-%{+yyyy.MM.dd}"
+
+  # Optional ingest node pipeline. By default no pipeline will be used.
+  #pipeline: ""
+
+  # Optional HTTP Path
+  #path: "/elasticsearch"
+
+  # Custom HTTP headers to add to each request
+  #headers:
+  #  X-My-Header: Contents of the header
+
+  # Proxy server url
+  #proxy_url: http://proxy:3128
+
+  # The number of times a particular Elasticsearch index operation is attempted. If
+  # the indexing operation doesn't succeed after this many retries, the events are
+  # dropped. The default is 3.
+  #max_retries: 3
+
+  # The maximum number of events to bulk in a single Elasticsearch bulk API index request.
+  # The default is 50.
+  #bulk_max_size: 50
+
+  # The number of seconds to wait before trying to reconnect to Elasticsearch
+  # after a network error. After waiting backoff.init seconds, the Beat
+  # tries to reconnect. If the attempt fails, the backoff timer is increased
+  # exponentially up to backoff.max. After a successful connection, the backoff
+  # timer is reset. The default is 1s.
+  #backoff.init: 1s
+
+  # The maximum number of seconds to wait before attempting to connect to
+  # Elasticsearch after a network error. The default is 60s.
+  #backoff.max: 60s
+
+  # Configure http request timeout before failing a request to Elasticsearch.
+  #timeout: 90
+
+  # Use SSL settings for HTTPS.
+  #ssl.enabled: true
+
+  # Configure SSL verification mode. If `none` is configured, all server hosts
+  # and certificates will be accepted. In this mode, SSL based connections are
+  # susceptible to man-in-the-middle attacks. Use only for testing. Default is
+  # `full`.
+  #ssl.verification_mode: full
+
+  # List of supported/valid TLS versions.
By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + +#----------------------------- Logstash output --------------------------------- +#output.logstash: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Number of workers per Logstash host. + #worker: 1 + + # Set gzip compression level. + #compression_level: 3 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional maximum time to live for a connection to Logstash, after which the + # connection will be re-established. A value of `0s` (the default) will + # disable this feature. + # + # Not yet supported for async connections (i.e. with the "pipelining" option set) + #ttl: 30s + + # Optional load balance the events between the Logstash hosts. Default is false. + #loadbalance: false + + # Number of batches to be sent asynchronously to Logstash while processing + # new batches. + #pipelining: 2 + + # If enabled only a subset of events in a batch of events is transferred per + # transaction. The number of events to be sent increases up to `bulk_max_size` + # if no error is encountered. + #slow_start: false + + # The number of seconds to wait before trying to reconnect to Logstash + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Logstash after a network error. The default is 60s. + #backoff.max: 60s + + # Optional index name. The default index name is set to packetbeat + # in all lowercase. + #index: 'packetbeat' + + # SOCKS5 proxy server URL + #proxy_url: socks5://user:password@socks5-server:2233 + + # Resolve names locally when using a proxy server. Defaults to false. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. 
+ # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat and Winlogbeat, ignore the max_retries setting + # and retry until all events are published. Set max_retries to a value less + # than 0 to retry until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Logstash request. The + # default is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Logstash server before + # timing out. The default is 30s. + #timeout: 30s + +#------------------------------- Kafka output ---------------------------------- +#output.kafka: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The list of Kafka broker addresses from where to fetch the cluster metadata. + # The cluster metadata contain the actual Kafka brokers events are published + # to. + #hosts: ["localhost:9092"] + + # The Kafka topic used for produced events. The setting can be a format string + # using any event field. To set the topic from document type use `%{[type]}`. + #topic: beats + + # The Kafka event key setting. Use format string to create unique event key. + # By default no event key will be generated. + #key: '' + + # The Kafka event partitioning strategy. Default hashing strategy is `hash` + # using the `output.kafka.key` setting or randomly distributes events if + # `output.kafka.key` is not configured. + #partition.hash: + # If enabled, events will only be published to partitions with reachable + # leaders. Default is false. + #reachable_only: false + + # Configure alternative event field names used to compute the hash value. + # If empty `output.kafka.key` setting will be used. + # Default value is empty list. + #hash: [] + + # Authentication details. Password is required if username is set. + #username: '' + #password: '' + + # Kafka version packetbeat is assumed to run against. Defaults to the "1.0.0". + #version: '1.0.0' + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Metadata update configuration. Metadata do contain leader information + # deciding which broker to use when publishing. + #metadata: + # Max metadata request retry attempts when cluster is in middle of leader + # election. Defaults to 3 retries. + #retry.max: 3 + + # Waiting time between retries during leader elections. Default is 250ms. + #retry.backoff: 250ms + + # Refresh metadata interval. Defaults to every 10 minutes. + #refresh_frequency: 10m + + # The number of concurrent load-balanced Kafka output workers. + #worker: 1 + + # The number of times to retry publishing an event after a publishing failure. 
+ # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Kafka request. The default + # is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Kafka brokers before + # timing out. The default is 30s. + #timeout: 30s + + # The maximum duration a broker will wait for number of required ACKs. The + # default is 10s. + #broker_timeout: 10s + + # The number of messages buffered for each Kafka broker. The default is 256. + #channel_buffer_size: 256 + + # The keep-alive period for an active network connection. If 0s, keep-alives + # are disabled. The default is 0 seconds. + #keep_alive: 0 + + # Sets the output compression codec. Must be one of none, snappy and gzip. The + # default is gzip. + #compression: gzip + + # Set the compression level. Currently only gzip provides a compression level + # between 0 and 9. The default value is chosen by the compression algorithm. + #compression_level: 4 + + # The maximum permitted size of JSON-encoded messages. Bigger messages will be + # dropped. The default value is 1000000 (bytes). This value should be equal to + # or less than the broker's message.max.bytes. + #max_message_bytes: 1000000 + + # The ACK reliability level required from broker. 0=no response, 1=wait for + # local commit, -1=wait for all replicas to commit. The default is 1. Note: + # If set to 0, no ACKs are returned by Kafka. Messages might be lost silently + # on error. + #required_acks: 1 + + # The configurable ClientID used for logging, debugging, and auditing + # purposes. The default is "beats". + #client_id: beats + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- Redis output ---------------------------------- +#output.redis: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. 
+ #escape_html: true + + # The list of Redis servers to connect to. If load balancing is enabled, the + # events are distributed to the servers in the list. If one server becomes + # unreachable, the events are distributed to the reachable servers only. + #hosts: ["localhost:6379"] + + # The Redis port to use if hosts does not contain a port number. The default + # is 6379. + #port: 6379 + + # The name of the Redis list or channel the events are published to. The + # default is packetbeat. + #key: packetbeat + + # The password to authenticate with. The default is no authentication. + #password: + + # The Redis database number where the events are published. The default is 0. + #db: 0 + + # The Redis data type to use for publishing events. If the data type is list, + # the Redis RPUSH command is used. If the data type is channel, the Redis + # PUBLISH command is used. The default value is list. + #datatype: list + + # The number of workers to use for each host configured to publish events to + # Redis. Use this setting along with the loadbalance option. For example, if + # you have 2 hosts and 3 workers, in total 6 workers are started (3 for each + # host). + #worker: 1 + + # If set to true and multiple hosts or workers are configured, the output + # plugin load balances published events onto all Redis hosts. If set to false, + # the output plugin sends all events to only one host (determined at random) + # and will switch to another host if the currently selected one becomes + # unreachable. The default value is true. + #loadbalance: true + + # The Redis connection timeout in seconds. The default is 5 seconds. + #timeout: 5s + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The number of seconds to wait before trying to reconnect to Redis + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Redis after a network error. The default is 60s. + #backoff.max: 60s + + # The maximum number of events to bulk in a single Redis request or pipeline. + # The default is 2048. + #bulk_max_size: 2048 + + # The URL of the SOCKS5 proxy to use when connecting to the Redis servers. The + # value must be a URL with a scheme of socks5://. + #proxy_url: + + # This option determines whether Redis hostnames are resolved locally when + # using a proxy. The default value is false, which means that name resolution + # occurs on the proxy server. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. 
+ #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- File output ----------------------------------- +#output.file: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Path to the directory where to save the generated files. The option is + # mandatory. + #path: "/tmp/packetbeat" + + # Name of the generated files. The default is `packetbeat` and it generates + # files: `packetbeat`, `packetbeat.1`, `packetbeat.2`, etc. + #filename: packetbeat + + # Maximum size in kilobytes of each file. When this size is reached, and on + # every packetbeat restart, the files are rotated. The default value is 10240 + # kB. + #rotate_every_kb: 10000 + + # Maximum number of files under path. When this number of files is reached, + # the oldest file is deleted and the rest are shifted from last to first. The + # default is 7 files. + #number_of_files: 7 + + # Permissions to use for file creation. The default is 0600. + #permissions: 0600 + + +#----------------------------- Console output --------------------------------- +#output.console: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + +#================================= Paths ====================================== + +# The home path for the packetbeat installation. This is the default base path +# for all other path settings and for miscellaneous files that come with the +# distribution (for example, the sample dashboards). +# If not set by a CLI flag or in the configuration file, the default for the +# home path is the location of the binary. +#path.home: + +# The configuration path for the packetbeat installation. This is the default +# base path for configuration files, including the main YAML configuration file +# and the Elasticsearch template file. If not set by a CLI flag or in the +# configuration file, the default for the configuration path is the home path. +#path.config: ${path.home} + +# The data path for the packetbeat installation. This is the default base path +# for all the files in which packetbeat needs to store its data. If not set by a +# CLI flag or in the configuration file, the default for the data path is a data +# subdirectory inside the home path. +#path.data: ${path.home}/data + +# The logs path for a packetbeat installation. This is the default location for +# the Beat's log files. 
If not set by a CLI flag or in the configuration file, +# the default for the logs path is a logs subdirectory inside the home path. +#path.logs: ${path.home}/logs + +#================================ Keystore ========================================== +# Location of the Keystore containing the keys and their sensitive values. +#keystore.path: "${path.config}/beats.keystore" + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards are disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The directory from where to read the dashboards. The default is the `kibana` +# folder in the home path. +#setup.dashboards.directory: ${path.home}/kibana + +# The URL from where to download the dashboards archive. It is used instead of +# the directory if it has a value. +#setup.dashboards.url: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the directory when it has a value. +#setup.dashboards.file: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#setup.dashboards.beat: packetbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#setup.dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#setup.dashboards.index: + +# Always use the Kibana API for loading the dashboards instead of autodetecting +# how to install the dashboards by first querying Elasticsearch. +#setup.dashboards.always_kibana: false + +# If true and Kibana is not reachable at the time when dashboards are loaded, +# it will retry to reconnect to Kibana instead of exiting with an error. +#setup.dashboards.retry.enabled: false + +# Duration interval between Kibana connection retries. +#setup.dashboards.retry.interval: 1s + +# Maximum number of retries before exiting with an error, 0 for unlimited retrying. +#setup.dashboards.retry.maximum: 0 + + +#============================== Template ===================================== + +# A template is used to set the mapping in Elasticsearch +# By default template loading is enabled and the template is loaded. +# These settings can be adjusted to load your own template or overwrite existing ones. + +# Set to false to disable template loading. +#setup.template.enabled: true + +# Template name. By default the template name is "packetbeat-%{[beat.version]}" +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.name: "packetbeat-%{[beat.version]}" + +# Template pattern. By default the template pattern is "-%{[beat.version]}-*" to apply to the default index settings. +# The first part is the version of the beat and then -* is used to match all daily indices. +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.pattern: "packetbeat-%{[beat.version]}-*" + +# Path to fields.yml file to generate the template +#setup.template.fields: "${path.config}/fields.yml" + +# A list of fields to be added to the template and Kibana index pattern. 
Also +# specify setup.template.overwrite: true to overwrite the existing template. +# This setting is experimental. +#setup.template.append_fields: +#- name: field_name +# type: field_type + +# Enable json template loading. If this is enabled, the fields.yml is ignored. +#setup.template.json.enabled: false + +# Path to the json template file +#setup.template.json.path: "${path.config}/template.json" + +# Name under which the template is stored in Elasticsearch +#setup.template.json.name: "" + +# Overwrite existing template +#setup.template.overwrite: false + +# Elasticsearch template settings +setup.template.settings: + + # A dictionary of settings to place into the settings.index dictionary + # of the Elasticsearch template. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html + #index: + #number_of_shards: 1 + #codec: best_compression + #number_of_routing_shards: 30 + + # A dictionary of settings for the _source field. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html + #_source: + #enabled: false + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Optional HTTP Path + #path: "" + + # Use SSL settings for HTTPS. Default is true. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + + +#================================ Logging ====================================== +# There are four options for the log output: file, stderr, syslog, eventlog +# The file output is the default. + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: info + +# Enable debug output for selected components. To enable all selectors use ["*"] +# Other available selectors are "beat", "publish", "service" +# Multiple selectors can be chained. +#logging.selectors: [ ] + +# Send all logging output to syslog. The default is false. 
+#logging.to_syslog: false + +# Send all logging output to Windows Event Logs. The default is false. +#logging.to_eventlog: false + +# If enabled, packetbeat periodically logs its internal metrics that have changed +# in the last period. For each metric that changed, the delta from the value at +# the beginning of the period is logged. Also, the total values for +# all non-zero internal metrics are logged on shutdown. The default is true. +#logging.metrics.enabled: true + +# The period after which to log the internal metrics. The default is 30s. +#logging.metrics.period: 30s + +# Logging to rotating files. Set logging.to_files to false to disable logging to +# files. +logging.to_files: true +logging.files: + # Configure the path where the logs are written. The default is the logs directory + # under the home path (the binary location). + #path: /var/log/packetbeat + + # The name of the files where the logs are written to. + #name: packetbeat + + # Configure log file size limit. If limit is reached, log file will be + # automatically rotated + #rotateeverybytes: 10485760 # = 10MB + + # Number of rotated log files to keep. Oldest files will be deleted first. + #keepfiles: 7 + + # The permissions mask to apply when rotating log files. The default value is 0600. + # Must be a valid Unix-style file permissions mask expressed in octal notation. + #permissions: 0600 + + # Enable log file rotation on time intervals in addition to size-based rotation. + # Intervals must be at least 1s. Values of 1m, 1h, 24h, 7*24h, 30*24h, and 365*24h + # are boundary-aligned with minutes, hours, days, weeks, months, and years as + # reported by the local system clock. All other intervals are calculated from the + # unix epoch. Defaults to disabled. + #interval: 0 + +# Set to true to log messages in json format. +#logging.json: false + + +#============================== Xpack Monitoring ===================================== +# packetbeat can export internal metrics to a central Elasticsearch monitoring cluster. +# This requires xpack monitoring to be enabled in Elasticsearch. +# The reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. Any setting that is not set is +# automatically inherited from the Elasticsearch output configuration, so if you +# have the Elasticsearch output configured, you can simply uncomment the +# following line, and leave the rest commented out. +#xpack.monitoring.elasticsearch: + + # Array of hosts to connect to. + # Scheme and port can be left out and will be set to the default (http and 9200) + # In case you specify and additional path, the scheme is required: http://localhost:9200/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200 + #hosts: ["localhost:9200"] + + # Set gzip compression level. + #compression_level: 0 + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "beats_system" + #password: "changeme" + + # Dictionary of HTTP parameters to pass within the url with index operations. + #parameters: + #param1: value1 + #param2: value2 + + # Custom HTTP headers to add to each request + #headers: + # X-My-Header: Contents of the header + + # Proxy server url + #proxy_url: http://proxy:3128 + + # The number of times a particular Elasticsearch index operation is attempted. 
If
+  # the indexing operation doesn't succeed after this many retries, the events are
+  # dropped. The default is 3.
+  #max_retries: 3
+
+  # The maximum number of events to bulk in a single Elasticsearch bulk API index request.
+  # The default is 50.
+  #bulk_max_size: 50
+
+  # The number of seconds to wait before trying to reconnect to Elasticsearch
+  # after a network error. After waiting backoff.init seconds, the Beat
+  # tries to reconnect. If the attempt fails, the backoff timer is increased
+  # exponentially up to backoff.max. After a successful connection, the backoff
+  # timer is reset. The default is 1s.
+  #backoff.init: 1s
+
+  # The maximum number of seconds to wait before attempting to connect to
+  # Elasticsearch after a network error. The default is 60s.
+  #backoff.max: 60s
+
+  # Configure http request timeout before failing a request to Elasticsearch.
+  #timeout: 90
+
+  # Use SSL settings for HTTPS.
+  #ssl.enabled: true
+
+  # Configure SSL verification mode. If `none` is configured, all server hosts
+  # and certificates will be accepted. In this mode, SSL based connections are
+  # susceptible to man-in-the-middle attacks. Use only for testing. Default is
+  # `full`.
+  #ssl.verification_mode: full
+
+  # List of supported/valid TLS versions. By default all TLS versions 1.0 up to
+  # 1.2 are enabled.
+  #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2]
+
+  # SSL configuration. By default is off.
+  # List of root certificates for HTTPS server verifications
+  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
+
+  # Certificate for SSL client authentication
+  #ssl.certificate: "/etc/pki/client/cert.pem"
+
+  # Client Certificate Key
+  #ssl.key: "/etc/pki/client/cert.key"
+
+  # Optional passphrase for decrypting the Certificate Key.
+  #ssl.key_passphrase: ''
+
+  # Configure cipher suites to be used for SSL connections
+  #ssl.cipher_suites: []
+
+  # Configure curve types for ECDHE based cipher suites
+  #ssl.curve_types: []
+
+  # Configure what types of renegotiation are supported. Valid options are
+  # never, once, and freely. Default is never.
+  #ssl.renegotiation: never
+
+  #metrics.period: 10s
+  #state.period: 1m
+
+#================================ HTTP Endpoint ======================================
+# Each beat can expose internal metrics through an HTTP endpoint. For security
+# reasons the endpoint is disabled by default. This feature is currently experimental.
+# Stats can be accessed through http://localhost:5066/stats . For pretty JSON output
+# append ?pretty to the URL.
+
+# Defines if the HTTP endpoint is enabled.
+#http.enabled: false
+
+# The HTTP endpoint will bind to this hostname or IP address. It is recommended to use only localhost.
+#http.host: localhost
+
+# Port on which the HTTP endpoint will bind. Default is 5066.
+#http.port: 5066
+
+#============================= Process Security ================================
+
+# Enable or disable seccomp system call filtering on Linux. Default is enabled.
+#seccomp.enabled: true
diff --git a/x-pack/packetbeat/packetbeat.yml b/x-pack/packetbeat/packetbeat.yml
new file mode 100644
index 00000000000..36b2d114295
--- /dev/null
+++ b/x-pack/packetbeat/packetbeat.yml
@@ -0,0 +1,234 @@
+#################### Packetbeat Configuration Example #########################
+
+# This file is an example configuration file highlighting only the most common
+# options. The packetbeat.reference.yml file from the same directory contains all the
+# supported options with more comments. You can use it as a reference.
+# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/packetbeat/index.html + +#============================== Network device ================================ + +# Select the network interface to sniff the data. On Linux, you can use the +# "any" keyword to sniff on all connected interfaces. +packetbeat.interfaces.device: any + +#================================== Flows ===================================== + +# Set `enabled: false` or comment out all options to disable flows reporting. +packetbeat.flows: + # Set network flow timeout. Flow is killed if no packet is received before being + # timed out. + timeout: 30s + + # Configure reporting period. If set to -1, only killed flows will be reported + period: 10s + +#========================== Transaction protocols ============================= + +packetbeat.protocols: +- type: icmp + # Enable ICMPv4 and ICMPv6 monitoring. Default: false + enabled: true + +- type: amqp + # Configure the ports where to listen for AMQP traffic. You can disable + # the AMQP protocol by commenting out the list of ports. + ports: [5672] + +- type: cassandra + #Cassandra port for traffic monitoring. + ports: [9042] + +- type: dhcpv4 + # Configure the DHCP for IPv4 ports. + ports: [67, 68] + +- type: dns + # Configure the ports where to listen for DNS traffic. You can disable + # the DNS protocol by commenting out the list of ports. + ports: [53] + + # include_authorities controls whether or not the dns.authorities field + # (authority resource records) is added to messages. + include_authorities: true + + # include_additionals controls whether or not the dns.additionals field + # (additional resource records) is added to messages. + include_additionals: true + +- type: http + # Configure the ports where to listen for HTTP traffic. You can disable + # the HTTP protocol by commenting out the list of ports. + ports: [80, 8080, 8000, 5000, 8002] + +- type: memcache + # Configure the ports where to listen for memcache traffic. You can disable + # the Memcache protocol by commenting out the list of ports. + ports: [11211] + +- type: mysql + # Configure the ports where to listen for MySQL traffic. You can disable + # the MySQL protocol by commenting out the list of ports. + ports: [3306] + +- type: pgsql + # Configure the ports where to listen for Pgsql traffic. You can disable + # the Pgsql protocol by commenting out the list of ports. + ports: [5432] + +- type: redis + # Configure the ports where to listen for Redis traffic. You can disable + # the Redis protocol by commenting out the list of ports. + ports: [6379] + +- type: thrift + # Configure the ports where to listen for Thrift-RPC traffic. You can disable + # the Thrift-RPC protocol by commenting out the list of ports. + ports: [9090] + +- type: mongodb + # Configure the ports where to listen for MongoDB traffic. You can disable + # the MongoDB protocol by commenting out the list of ports. + ports: [27017] + +- type: nfs + # Configure the ports where to listen for NFS traffic. You can disable + # the NFS protocol by commenting out the list of ports. + ports: [2049] + +- type: tls + # Configure the ports where to listen for TLS traffic. You can disable + # the TLS protocol by commenting out the list of ports. 
+ ports: [443] + +#==================== Elasticsearch template setting ========================== + +setup.template.settings: + index.number_of_shards: 3 + #index.codec: best_compression + #_source.enabled: false + +#================================ General ===================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. +#fields: +# env: staging + + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#setup.dashboards.url: + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Kibana Space ID + # ID of the Kibana Space into which the dashboards should be loaded. By default, + # the Default Space will be used. + #space.id: + +#============================= Elastic Cloud ================================== + +# These settings simplify using packetbeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. +#cloud.auth: + +#================================ Outputs ===================================== + +# Configure what output to use when sending the data collected by the beat. + +#-------------------------- Elasticsearch output ------------------------------ +output.elasticsearch: + # Array of hosts to connect to. + hosts: ["localhost:9200"] + + # Enabled ilm (beta) to use index lifecycle management instead daily indices. + #ilm.enabled: false + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + +#----------------------------- Logstash output -------------------------------- +#output.logstash: + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Optional SSL. By default is off. 
+ # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + +#================================ Processors ===================================== + +# Configure processors to enhance or manipulate events generated by the beat. + +processors: + - add_host_metadata: ~ + - add_cloud_metadata: ~ + +#================================ Logging ===================================== + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: debug + +# At debug level, you can selectively enable logging only for some components. +# To enable all selectors use ["*"]. Examples of other selectors are "beat", +# "publish", "service". +#logging.selectors: ["*"] + +#============================== Xpack Monitoring =============================== +# packetbeat can export internal metrics to a central Elasticsearch monitoring +# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The +# reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. Most settings from the +# Elasticsearch output are accepted here as well. Any setting that is not set is +# automatically inherited from the Elasticsearch output configuration, so if you +# have the Elasticsearch output configured, you can simply uncomment the +# following line. +#xpack.monitoring.elasticsearch: From bb2cdc65d1b479ad62cb60df2d5b805a17eb3268 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:58:33 -0500 Subject: [PATCH 10/18] Refactor winlogbeat build logic --- winlogbeat/Makefile | 22 +- winlogbeat/cmd/root.go | 9 +- winlogbeat/include/fields.go | 2 +- winlogbeat/magefile.go | 107 +- winlogbeat/main.go | 1 - winlogbeat/scripts/mage/config.go | 48 + winlogbeat/scripts/mage/fields.go | 75 ++ winlogbeat/scripts/mage/package.go | 56 + winlogbeat/scripts/mage/update.go | 73 ++ x-pack/winlogbeat/Makefile | 9 + x-pack/winlogbeat/magefile.go | 36 + x-pack/winlogbeat/make.bat | 11 + x-pack/winlogbeat/winlogbeat.reference.yml | 1170 ++++++++++++++++++++ x-pack/winlogbeat/winlogbeat.yml | 155 +++ 14 files changed, 1674 insertions(+), 100 deletions(-) create mode 100644 winlogbeat/scripts/mage/config.go create mode 100644 winlogbeat/scripts/mage/fields.go create mode 100644 winlogbeat/scripts/mage/package.go create mode 100644 winlogbeat/scripts/mage/update.go create mode 100644 x-pack/winlogbeat/Makefile create mode 100644 x-pack/winlogbeat/magefile.go create mode 100644 x-pack/winlogbeat/make.bat create mode 100644 x-pack/winlogbeat/winlogbeat.reference.yml create mode 100644 x-pack/winlogbeat/winlogbeat.yml diff --git a/winlogbeat/Makefile b/winlogbeat/Makefile index 234ca977df6..ce08a70fb17 100644 --- a/winlogbeat/Makefile +++ b/winlogbeat/Makefile @@ -1,15 +1,17 @@ -BEAT_NAME=winlogbeat -BEAT_TITLE=Winlogbeat -SYSTEM_TESTS=true -TEST_ENVIRONMENT=false -GOX_OS=windows +# +# Variables +# +GOX_OS := windows -include ../libbeat/scripts/Makefile +# +# Includes +# +include ../dev-tools/make/oss.mk + +# +# Targets +# .PHONY: gen gen: GOOS=windows GOARCH=386 go generate -v -x ./... 
- -# Collects all dependencies and then calls update -.PHONY: collect -collect: diff --git a/winlogbeat/cmd/root.go b/winlogbeat/cmd/root.go index 4b89b926a3d..c81a81920d3 100644 --- a/winlogbeat/cmd/root.go +++ b/winlogbeat/cmd/root.go @@ -17,8 +17,13 @@ package cmd -import cmd "github.com/elastic/beats/libbeat/cmd" -import "github.com/elastic/beats/winlogbeat/beater" +import ( + cmd "github.com/elastic/beats/libbeat/cmd" + "github.com/elastic/beats/winlogbeat/beater" + + // Register fields. + _ "github.com/elastic/beats/winlogbeat/include" +) // Name of this beat var Name = "winlogbeat" diff --git a/winlogbeat/include/fields.go b/winlogbeat/include/fields.go index 48ba767e378..cd5de0db962 100644 --- a/winlogbeat/include/fields.go +++ b/winlogbeat/include/fields.go @@ -24,7 +24,7 @@ import ( ) func init() { - if err := asset.SetFields("winlogbeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("winlogbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } diff --git a/winlogbeat/magefile.go b/winlogbeat/magefile.go index 0ca1411d2a7..6771ec56ca6 100644 --- a/winlogbeat/magefile.go +++ b/winlogbeat/magefile.go @@ -20,97 +20,32 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/docs" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + winlogbeat "github.com/elastic/beats/winlogbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Winlogbeat ships Windows event logs to Elasticsearch or Logstash." - - mage.Platforms = mage.Platforms.Filter("windows") -} - -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild() -} - -// CrossBuildXPack cross-builds the beat with XPack for all target platforms. -func CrossBuildXPack() error { - return mage.CrossBuildXPack() -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() + winlogbeat.SelectLogic = mage.OSSProject } -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. -// Use VERSION_QUALIFIER to control the version qualifier. 
-func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.UseElasticBeatPackaging() - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildXPack, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update updates the generated files (aka make update). -func Update() error { - return sh.Run("make", "update") -} - -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML() -} - -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(winlogbeat.Update.All) } diff --git a/winlogbeat/main.go b/winlogbeat/main.go index fff4708a9fb..c561fdb9809 100644 --- a/winlogbeat/main.go +++ b/winlogbeat/main.go @@ -28,7 +28,6 @@ import ( "os" "github.com/elastic/beats/winlogbeat/cmd" - _ "github.com/elastic/beats/winlogbeat/include" ) func main() { diff --git a/winlogbeat/scripts/mage/config.go b/winlogbeat/scripts/mage/config.go new file mode 100644 index 00000000000..5de186cc604 --- /dev/null +++ b/winlogbeat/scripts/mage/config.go @@ -0,0 +1,48 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/elastic/beats/dev-tools/mage" +) + +// config generates short/reference configs. +func config() error { + // NOTE: No Docker config. 
+ return mage.Config(mage.ShortConfigType|mage.ReferenceConfigType, configFileParams(), ".") +} + +func configFileParams() mage.ConfigFileParams { + return mage.ConfigFileParams{ + ShortParts: []string{ + mage.OSSBeatDir("_meta/beat.yml"), + mage.LibbeatDir("_meta/config.yml"), + }, + ReferenceParts: []string{ + mage.OSSBeatDir("_meta/beat.reference.yml"), + mage.LibbeatDir("_meta/config.reference.yml"), + }, + DockerParts: []string{ + mage.OSSBeatDir("_meta/beat.docker.yml"), + mage.LibbeatDir("_meta/config.docker.yml"), + }, + ExtraVars: map[string]interface{}{ + "GOOS": "windows", + }, + } +} diff --git a/winlogbeat/scripts/mage/fields.go b/winlogbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..bf40c644a4e --- /dev/null +++ b/winlogbeat/scripts/mage/fields.go @@ -0,0 +1,75 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { + switch SelectLogic { + case mage.OSSProject: + return b.commonFieldsGo() + case mage.XPackProject: + return nil + default: + panic(mage.ErrUnknownProjectType) + } +} + +func (fieldsBuilder) FieldsYML() error { + var modules []string + switch SelectLogic { + case mage.OSSProject, mage.XPackProject: + // No modules. + default: + panic(mage.ErrUnknownProjectType) + } + + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML, modules...); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} diff --git a/winlogbeat/scripts/mage/package.go b/winlogbeat/scripts/mage/package.go new file mode 100644 index 00000000000..918011093ff --- /dev/null +++ b/winlogbeat/scripts/mage/package.go @@ -0,0 +1,56 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "fmt" + "time" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" +) + +func init() { + mage.BeatDescription = "Winlogbeat ships Windows event logs to Elasticsearch or Logstash." + + mage.Platforms = mage.Platforms.Filter("windows") +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() + + switch SelectLogic { + case mage.OSSProject: + mage.UseElasticBeatOSSPackaging() + case mage.XPackProject: + mage.UseElasticBeatXPackPackaging() + } + mage.PackageKibanaDashboardsFromBuildDir() + + mg.Deps(Update.All) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} diff --git a/winlogbeat/scripts/mage/update.go b/winlogbeat/scripts/mage/update.go new file mode 100644 index 00000000000..8dfa5f42cfa --- /dev/null +++ b/winlogbeat/scripts/mage/update.go @@ -0,0 +1,73 @@ +// Licensed to Elasticsearch B.V. under one or more contributor +// license agreements. See the NOTICE file distributed with +// this work for additional information regarding copyright +// ownership. Elasticsearch B.V. licenses this file to you under +// the Apache License, Version 2.0 (the "License"); you may +// not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +package mage + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + + "github.com/elastic/beats/dev-tools/mage/target/build" + + "github.com/elastic/beats/dev-tools/mage" +) + +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields, Update.Dashboards, Update.Config, Update.FieldDocs) +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. 
+func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + return mage.KibanaDashboards() +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// FieldDocs updates the field documentation. +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} diff --git a/x-pack/winlogbeat/Makefile b/x-pack/winlogbeat/Makefile new file mode 100644 index 00000000000..e21f2ba01a8 --- /dev/null +++ b/x-pack/winlogbeat/Makefile @@ -0,0 +1,9 @@ +# +# Variables +# +GOX_OS := windows + +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/winlogbeat/magefile.go b/x-pack/winlogbeat/magefile.go new file mode 100644 index 00000000000..09e08b7954e --- /dev/null +++ b/x-pack/winlogbeat/magefile.go @@ -0,0 +1,36 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +// +build mage + +package main + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + winlogbeat "github.com/elastic/beats/winlogbeat/scripts/mage" +) + +func init() { + winlogbeat.SelectLogic = mage.XPackProject +} + +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(winlogbeat.Update.All) } diff --git a/x-pack/winlogbeat/make.bat b/x-pack/winlogbeat/make.bat new file mode 100644 index 00000000000..81de1ba946f --- /dev/null +++ b/x-pack/winlogbeat/make.bat @@ -0,0 +1,11 @@ +@echo off + +REM Windows wrapper for Mage (https://magefile.org/) that installs it +REM to %GOPATH%\bin from the Beats vendor directory. +REM +REM After running this once you may invoke mage.exe directly. + +WHERE mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage + +mage %* diff --git a/x-pack/winlogbeat/winlogbeat.reference.yml b/x-pack/winlogbeat/winlogbeat.reference.yml new file mode 100644 index 00000000000..d2487c225ec --- /dev/null +++ b/x-pack/winlogbeat/winlogbeat.reference.yml @@ -0,0 +1,1170 @@ +########################## Winlogbeat Configuration ########################### + +# This file is a full configuration example documenting all non-deprecated +# options in comments. For a shorter configuration example, that contains only +# the most common options, please see winlogbeat.yml in the same directory. +# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/winlogbeat/index.html + +#======================= Winlogbeat specific options ========================== + +# The registry file is where Winlogbeat persists its state so that the beat +# can resume after shutdown or an outage. The default is .winlogbeat.yml +# in the directory in which it was started. 
+#winlogbeat.registry_file: .winlogbeat.yml + +# event_logs specifies a list of event logs to monitor as well as any +# accompanying options. The YAML data type of event_logs is a list of +# dictionaries. +# +# The supported keys are name (required), tags, fields, fields_under_root, +# forwarded, ignore_older, level, event_id, provider, and include_xml. Please +# visit the documentation for the complete details of each option. +# https://go.es.io/WinlogbeatConfig +winlogbeat.event_logs: + - name: Application + ignore_older: 72h + - name: Security + - name: System + +#================================ General ====================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +# If this options is not defined, the hostname is used. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. Tags make it easy to group servers by different +# logical properties. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. Fields can be scalar values, arrays, dictionaries, or any nested +# combination of these. +#fields: +# env: staging + +# If this option is set to true, the custom fields are stored as top-level +# fields in the output document instead of being grouped under a fields +# sub-dictionary. Default is false. +#fields_under_root: false + +# Internal queue configuration for buffering events to be published. +#queue: + # Queue type by name (default 'mem') + # The memory queue will present all available events (up to the outputs + # bulk_max_size) to the output, the moment the output is ready to server + # another batch of events. + #mem: + # Max number of events the queue can buffer. + #events: 4096 + + # Hints the minimum number of events stored in the queue, + # before providing a batch of events to the outputs. + # The default value is set to 2048. + # A value of 0 ensures events are immediately available + # to be sent to the outputs. + #flush.min_events: 2048 + + # Maximum duration after which events are available to the outputs, + # if the number of events stored in the queue is < min_flush_events. + #flush.timeout: 1s + + # The spool queue will store events in a local spool file, before + # forwarding the events to the outputs. + # + # Beta: spooling to disk is currently a beta feature. Use with care. + # + # The spool file is a circular buffer, which blocks once the file/buffer is full. + # Events are put into a write buffer and flushed once the write buffer + # is full or the flush_timeout is triggered. + # Once ACKed by the output, events are removed immediately from the queue, + # making space for new events to be persisted. + #spool: + # The file namespace configures the file path and the file creation settings. + # Once the file exists, the `size`, `page_size` and `prealloc` settings + # will have no more effect. + #file: + # Location of spool file. The default value is ${path.data}/spool.dat. + #path: "${path.data}/spool.dat" + + # Configure file permissions if file is created. The default value is 0600. + #permissions: 0600 + + # File size hint. The spool blocks, once this limit is reached. The default value is 100 MiB. + #size: 100MiB + + # The files page size. A file is split into multiple pages of the same size. The default value is 4KiB. 
+ #page_size: 4KiB + + # If prealloc is set, the required space for the file is reserved using + # truncate. The default value is true. + #prealloc: true + + # Spool writer settings + # Events are serialized into a write buffer. The write buffer is flushed if: + # - The buffer limit has been reached. + # - The configured limit of buffered events is reached. + # - The flush timeout is triggered. + #write: + # Sets the write buffer size. + #buffer_size: 1MiB + + # Maximum duration after which events are flushed, if the write buffer + # is not full yet. The default value is 1s. + #flush.timeout: 1s + + # Number of maximum buffered events. The write buffer is flushed once the + # limit is reached. + #flush.events: 16384 + + # Configure the on-disk event encoding. The encoding can be changed + # between restarts. + # Valid encodings are: json, ubjson, and cbor. + #codec: cbor + #read: + # Reader flush timeout, waiting for more events to become available, so + # to fill a complete batch, as required by the outputs. + # If flush_timeout is 0, all available events are forwarded to the + # outputs immediately. + # The default value is 0s. + #flush.timeout: 0s + +# Sets the maximum number of CPUs that can be executing simultaneously. The +# default is the number of logical CPUs available in the system. +#max_procs: + +#================================ Processors =================================== + +# Processors are used to reduce the number of fields in the exported event or to +# enhance the event with external metadata. This section defines a list of +# processors that are applied one by one and the first one receives the initial +# event: +# +# event -> filter1 -> event1 -> filter2 ->event2 ... +# +# The supported processors are drop_fields, drop_event, include_fields, +# decode_json_fields, and add_cloud_metadata. +# +# For example, you can use the following processors to keep the fields that +# contain CPU load percentages, but remove the fields that contain CPU ticks +# values: +# +#processors: +#- include_fields: +# fields: ["cpu"] +#- drop_fields: +# fields: ["cpu.user", "cpu.system"] +# +# The following example drops the events that have the HTTP response code 200: +# +#processors: +#- drop_event: +# when: +# equals: +# http.code: 200 +# +# The following example renames the field a to b: +# +#processors: +#- rename: +# fields: +# - from: "a" +# to: "b" +# +# The following example tokenizes the string into fields: +# +#processors: +#- dissect: +# tokenizer: "%{key1} - %{key2}" +# field: "message" +# target_prefix: "dissect" +# +# The following example enriches each event with metadata from the cloud +# provider about the host machine. It works on EC2, GCE, DigitalOcean, +# Tencent Cloud, and Alibaba Cloud. +# +#processors: +#- add_cloud_metadata: ~ +# +# The following example enriches each event with the machine's local time zone +# offset from UTC. +# +#processors: +#- add_locale: +# format: offset +# +# The following example enriches each event with docker metadata, it matches +# given fields to an existing container id and adds info from that container: +# +#processors: +#- add_docker_metadata: +# host: "unix:///var/run/docker.sock" +# match_fields: ["system.process.cgroup.id"] +# match_pids: ["process.pid", "process.ppid"] +# match_source: true +# match_source_index: 4 +# match_short_id: false +# cleanup_timeout: 60 +# labels.dedot: false +# # To connect to Docker over TLS you must specify a client and CA certificate. 
+# #ssl: +# # certificate_authority: "/etc/pki/root/ca.pem" +# # certificate: "/etc/pki/client/cert.pem" +# # key: "/etc/pki/client/cert.key" +# +# The following example enriches each event with docker metadata, it matches +# container id from log path available in `source` field (by default it expects +# it to be /var/lib/docker/containers/*/*.log). +# +#processors: +#- add_docker_metadata: ~ +# +# The following example enriches each event with host metadata. +# +#processors: +#- add_host_metadata: +# netinfo.enabled: false +# +# The following example enriches each event with process metadata using +# process IDs included in the event. +# +#processors: +#- add_process_metadata: +# match_pids: ["system.process.ppid"] +# target: system.process.parent +# +# The following example decodes fields containing JSON strings +# and replaces the strings with valid JSON objects. +# +#processors: +#- decode_json_fields: +# fields: ["field1", "field2", ...] +# process_array: false +# max_depth: 1 +# target: "" +# overwrite_keys: false + +#============================= Elastic Cloud ================================== + +# These settings simplify using winlogbeat with the Elastic Cloud (https://cloud.elastic.co/). + +# The cloud.id setting overwrites the `output.elasticsearch.hosts` and +# `setup.kibana.host` options. +# You can find the `cloud.id` in the Elastic Cloud web UI. +#cloud.id: + +# The cloud.auth setting overwrites the `output.elasticsearch.username` and +# `output.elasticsearch.password` settings. The format is `:`. +#cloud.auth: + +#================================ Outputs ====================================== + +# Configure what output to use when sending the data collected by the beat. + +#-------------------------- Elasticsearch output ------------------------------- +output.elasticsearch: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Array of hosts to connect to. + # Scheme and port can be left out and will be set to the default (http and 9200) + # In case you specify and additional path, the scheme is required: http://localhost:9200/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200 + hosts: ["localhost:9200"] + + # Enabled ilm (beta) to use index lifecycle management instead daily indices. + #ilm.enabled: false + #ilm.rollover_alias: "winlogbeat" + #ilm.pattern: "{now/d}-000001" + + # Set gzip compression level. + #compression_level: 0 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Dictionary of HTTP parameters to pass within the url with index operations. + #parameters: + #param1: value1 + #param2: value2 + + # Number of workers per Elasticsearch host. + #worker: 1 + + # Optional index name. The default is "winlogbeat" plus date + # and generates [winlogbeat-]YYYY.MM.DD keys. + # In case you modify this pattern you must update setup.template.name and setup.template.pattern accordingly. + #index: "winlogbeat-%{[beat.version]}-%{+yyyy.MM.dd}" + + # Optional ingest node pipeline. By default no pipeline will be used. + #pipeline: "" + + # Optional HTTP Path + #path: "/elasticsearch" + + # Custom HTTP headers to add to each request + #headers: + # X-My-Header: Contents of the header + + # Proxy server url + #proxy_url: http://proxy:3128 + + # The number of times a particular Elasticsearch index operation is attempted. 
If + # the indexing operation doesn't succeed after this many retries, the events are + # dropped. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Elasticsearch bulk API index request. + # The default is 50. + #bulk_max_size: 50 + + # The number of seconds to wait before trying to reconnect to Elasticsearch + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Elasticsearch after a network error. The default is 60s. + #backoff.max: 60s + + # Configure http request timeout before failing a request to Elasticsearch. + #timeout: 90 + + # Use SSL settings for HTTPS. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + +#----------------------------- Logstash output --------------------------------- +#output.logstash: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The Logstash hosts + #hosts: ["localhost:5044"] + + # Number of workers per Logstash host. + #worker: 1 + + # Set gzip compression level. + #compression_level: 3 + + # Configure escaping html symbols in strings. + #escape_html: true + + # Optional maximum time to live for a connection to Logstash, after which the + # connection will be re-established. A value of `0s` (the default) will + # disable this feature. + # + # Not yet supported for async connections (i.e. with the "pipelining" option set) + #ttl: 30s + + # Optional load balance the events between the Logstash hosts. Default is false. + #loadbalance: false + + # Number of batches to be sent asynchronously to Logstash while processing + # new batches. + #pipelining: 2 + + # If enabled only a subset of events in a batch of events is transferred per + # transaction. The number of events to be sent increases up to `bulk_max_size` + # if no error is encountered. + #slow_start: false + + # The number of seconds to wait before trying to reconnect to Logstash + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. 
After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Logstash after a network error. The default is 60s. + #backoff.max: 60s + + # Optional index name. The default index name is set to winlogbeat + # in all lowercase. + #index: 'winlogbeat' + + # SOCKS5 proxy server URL + #proxy_url: socks5://user:password@socks5-server:2233 + + # Resolve names locally when using a proxy server. Defaults to false. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat and Winlogbeat, ignore the max_retries setting + # and retry until all events are published. Set max_retries to a value less + # than 0 to retry until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Logstash request. The + # default is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Logstash server before + # timing out. The default is 30s. + #timeout: 30s + +#------------------------------- Kafka output ---------------------------------- +#output.kafka: + # Boolean flag to enable or disable the output module. + #enabled: true + + # The list of Kafka broker addresses from where to fetch the cluster metadata. + # The cluster metadata contain the actual Kafka brokers events are published + # to. + #hosts: ["localhost:9092"] + + # The Kafka topic used for produced events. The setting can be a format string + # using any event field. To set the topic from document type use `%{[type]}`. + #topic: beats + + # The Kafka event key setting. Use format string to create unique event key. + # By default no event key will be generated. + #key: '' + + # The Kafka event partitioning strategy. Default hashing strategy is `hash` + # using the `output.kafka.key` setting or randomly distributes events if + # `output.kafka.key` is not configured. + #partition.hash: + # If enabled, events will only be published to partitions with reachable + # leaders. Default is false. 
+ #reachable_only: false + + # Configure alternative event field names used to compute the hash value. + # If empty `output.kafka.key` setting will be used. + # Default value is empty list. + #hash: [] + + # Authentication details. Password is required if username is set. + #username: '' + #password: '' + + # Kafka version winlogbeat is assumed to run against. Defaults to the "1.0.0". + #version: '1.0.0' + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Metadata update configuration. Metadata do contain leader information + # deciding which broker to use when publishing. + #metadata: + # Max metadata request retry attempts when cluster is in middle of leader + # election. Defaults to 3 retries. + #retry.max: 3 + + # Waiting time between retries during leader elections. Default is 250ms. + #retry.backoff: 250ms + + # Refresh metadata interval. Defaults to every 10 minutes. + #refresh_frequency: 10m + + # The number of concurrent load-balanced Kafka output workers. + #worker: 1 + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The maximum number of events to bulk in a single Kafka request. The default + # is 2048. + #bulk_max_size: 2048 + + # The number of seconds to wait for responses from the Kafka brokers before + # timing out. The default is 30s. + #timeout: 30s + + # The maximum duration a broker will wait for number of required ACKs. The + # default is 10s. + #broker_timeout: 10s + + # The number of messages buffered for each Kafka broker. The default is 256. + #channel_buffer_size: 256 + + # The keep-alive period for an active network connection. If 0s, keep-alives + # are disabled. The default is 0 seconds. + #keep_alive: 0 + + # Sets the output compression codec. Must be one of none, snappy and gzip. The + # default is gzip. + #compression: gzip + + # Set the compression level. Currently only gzip provides a compression level + # between 0 and 9. The default value is chosen by the compression algorithm. + #compression_level: 4 + + # The maximum permitted size of JSON-encoded messages. Bigger messages will be + # dropped. The default value is 1000000 (bytes). This value should be equal to + # or less than the broker's message.max.bytes. + #max_message_bytes: 1000000 + + # The ACK reliability level required from broker. 0=no response, 1=wait for + # local commit, -1=wait for all replicas to commit. The default is 1. Note: + # If set to 0, no ACKs are returned by Kafka. Messages might be lost silently + # on error. + #required_acks: 1 + + # The configurable ClientID used for logging, debugging, and auditing + # purposes. The default is "beats". + #client_id: beats + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. 
In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- Redis output ---------------------------------- +#output.redis: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # The list of Redis servers to connect to. If load balancing is enabled, the + # events are distributed to the servers in the list. If one server becomes + # unreachable, the events are distributed to the reachable servers only. + #hosts: ["localhost:6379"] + + # The Redis port to use if hosts does not contain a port number. The default + # is 6379. + #port: 6379 + + # The name of the Redis list or channel the events are published to. The + # default is winlogbeat. + #key: winlogbeat + + # The password to authenticate with. The default is no authentication. + #password: + + # The Redis database number where the events are published. The default is 0. + #db: 0 + + # The Redis data type to use for publishing events. If the data type is list, + # the Redis RPUSH command is used. If the data type is channel, the Redis + # PUBLISH command is used. The default value is list. + #datatype: list + + # The number of workers to use for each host configured to publish events to + # Redis. Use this setting along with the loadbalance option. For example, if + # you have 2 hosts and 3 workers, in total 6 workers are started (3 for each + # host). + #worker: 1 + + # If set to true and multiple hosts or workers are configured, the output + # plugin load balances published events onto all Redis hosts. If set to false, + # the output plugin sends all events to only one host (determined at random) + # and will switch to another host if the currently selected one becomes + # unreachable. The default value is true. + #loadbalance: true + + # The Redis connection timeout in seconds. The default is 5 seconds. + #timeout: 5s + + # The number of times to retry publishing an event after a publishing failure. + # After the specified number of retries, the events are typically dropped. + # Some Beats, such as Filebeat, ignore the max_retries setting and retry until + # all events are published. Set max_retries to a value less than 0 to retry + # until all events are published. The default is 3. + #max_retries: 3 + + # The number of seconds to wait before trying to reconnect to Redis + # after a network error. After waiting backoff.init seconds, the Beat + # tries to reconnect. If the attempt fails, the backoff timer is increased + # exponentially up to backoff.max. 
After a successful connection, the backoff + # timer is reset. The default is 1s. + #backoff.init: 1s + + # The maximum number of seconds to wait before attempting to connect to + # Redis after a network error. The default is 60s. + #backoff.max: 60s + + # The maximum number of events to bulk in a single Redis request or pipeline. + # The default is 2048. + #bulk_max_size: 2048 + + # The URL of the SOCKS5 proxy to use when connecting to the Redis servers. The + # value must be a URL with a scheme of socks5://. + #proxy_url: + + # This option determines whether Redis hostnames are resolved locally when + # using a proxy. The default value is false, which means that name resolution + # occurs on the proxy server. + #proxy_use_local_resolver: false + + # Enable SSL support. SSL is automatically enabled, if any SSL setting is set. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. + #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # Optional SSL configuration options. SSL is off by default. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + # Configure what types of renegotiation are supported. Valid options are + # never, once, and freely. Default is never. + #ssl.renegotiation: never + +#------------------------------- File output ----------------------------------- +#output.file: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. + #escape_html: true + + # Path to the directory where to save the generated files. The option is + # mandatory. + #path: "/tmp/winlogbeat" + + # Name of the generated files. The default is `winlogbeat` and it generates + # files: `winlogbeat`, `winlogbeat.1`, `winlogbeat.2`, etc. + #filename: winlogbeat + + # Maximum size in kilobytes of each file. When this size is reached, and on + # every winlogbeat restart, the files are rotated. The default value is 10240 + # kB. + #rotate_every_kb: 10000 + + # Maximum number of files under path. When this number of files is reached, + # the oldest file is deleted and the rest are shifted from last to first. The + # default is 7 files. + #number_of_files: 7 + + # Permissions to use for file creation. The default is 0600. + #permissions: 0600 + + +#----------------------------- Console output --------------------------------- +#output.console: + # Boolean flag to enable or disable the output module. + #enabled: true + + # Configure JSON encoding + #codec.json: + # Pretty print json event + #pretty: false + + # Configure escaping html symbols in strings. 
+ #escape_html: true + +#================================= Paths ====================================== + +# The home path for the winlogbeat installation. This is the default base path +# for all other path settings and for miscellaneous files that come with the +# distribution (for example, the sample dashboards). +# If not set by a CLI flag or in the configuration file, the default for the +# home path is the location of the binary. +#path.home: + +# The configuration path for the winlogbeat installation. This is the default +# base path for configuration files, including the main YAML configuration file +# and the Elasticsearch template file. If not set by a CLI flag or in the +# configuration file, the default for the configuration path is the home path. +#path.config: ${path.home} + +# The data path for the winlogbeat installation. This is the default base path +# for all the files in which winlogbeat needs to store its data. If not set by a +# CLI flag or in the configuration file, the default for the data path is a data +# subdirectory inside the home path. +#path.data: ${path.home}/data + +# The logs path for a winlogbeat installation. This is the default location for +# the Beat's log files. If not set by a CLI flag or in the configuration file, +# the default for the logs path is a logs subdirectory inside the home path. +#path.logs: ${path.home}/logs + +#================================ Keystore ========================================== +# Location of the Keystore containing the keys and their sensitive values. +#keystore.path: "${path.config}/beats.keystore" + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards are disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The directory from where to read the dashboards. The default is the `kibana` +# folder in the home path. +#setup.dashboards.directory: ${path.home}/kibana + +# The URL from where to download the dashboards archive. It is used instead of +# the directory if it has a value. +#setup.dashboards.url: + +# The file archive (zip file) from where to read the dashboards. It is used instead +# of the directory when it has a value. +#setup.dashboards.file: + +# In case the archive contains the dashboards from multiple Beats, this lets you +# select which one to load. You can load all the dashboards in the archive by +# setting this to the empty string. +#setup.dashboards.beat: winlogbeat + +# The name of the Kibana index to use for setting the configuration. Default is ".kibana" +#setup.dashboards.kibana_index: .kibana + +# The Elasticsearch index name. This overwrites the index name defined in the +# dashboards and index pattern. Example: testbeat-* +#setup.dashboards.index: + +# Always use the Kibana API for loading the dashboards instead of autodetecting +# how to install the dashboards by first querying Elasticsearch. +#setup.dashboards.always_kibana: false + +# If true and Kibana is not reachable at the time when dashboards are loaded, +# it will retry to reconnect to Kibana instead of exiting with an error. +#setup.dashboards.retry.enabled: false + +# Duration interval between Kibana connection retries. +#setup.dashboards.retry.interval: 1s + +# Maximum number of retries before exiting with an error, 0 for unlimited retrying. 
+#setup.dashboards.retry.maximum: 0 + + +#============================== Template ===================================== + +# A template is used to set the mapping in Elasticsearch +# By default template loading is enabled and the template is loaded. +# These settings can be adjusted to load your own template or overwrite existing ones. + +# Set to false to disable template loading. +#setup.template.enabled: true + +# Template name. By default the template name is "winlogbeat-%{[beat.version]}" +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.name: "winlogbeat-%{[beat.version]}" + +# Template pattern. By default the template pattern is "-%{[beat.version]}-*" to apply to the default index settings. +# The first part is the version of the beat and then -* is used to match all daily indices. +# The template name and pattern has to be set in case the elasticsearch index pattern is modified. +#setup.template.pattern: "winlogbeat-%{[beat.version]}-*" + +# Path to fields.yml file to generate the template +#setup.template.fields: "${path.config}/fields.yml" + +# A list of fields to be added to the template and Kibana index pattern. Also +# specify setup.template.overwrite: true to overwrite the existing template. +# This setting is experimental. +#setup.template.append_fields: +#- name: field_name +# type: field_type + +# Enable json template loading. If this is enabled, the fields.yml is ignored. +#setup.template.json.enabled: false + +# Path to the json template file +#setup.template.json.path: "${path.config}/template.json" + +# Name under which the template is stored in Elasticsearch +#setup.template.json.name: "" + +# Overwrite existing template +#setup.template.overwrite: false + +# Elasticsearch template settings +setup.template.settings: + + # A dictionary of settings to place into the settings.index dictionary + # of the Elasticsearch template. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping.html + #index: + #number_of_shards: 1 + #codec: best_compression + #number_of_routing_shards: 30 + + # A dictionary of settings for the _source field. For more details, please check + # https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-source-field.html + #_source: + #enabled: false + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. +setup.kibana: + + # Kibana Host + # Scheme and port can be left out and will be set to the default (http and 5601) + # In case you specify and additional path, the scheme is required: http://localhost:5601/path + # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601 + #host: "localhost:5601" + + # Optional protocol and basic auth credentials. + #protocol: "https" + #username: "elastic" + #password: "changeme" + + # Optional HTTP Path + #path: "" + + # Use SSL settings for HTTPS. Default is true. + #ssl.enabled: true + + # Configure SSL verification mode. If `none` is configured, all server hosts + # and certificates will be accepted. In this mode, SSL based connections are + # susceptible to man-in-the-middle attacks. Use only for testing. Default is + # `full`. + #ssl.verification_mode: full + + # List of supported/valid TLS versions. By default all TLS versions 1.0 up to + # 1.2 are enabled. 
+ #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2] + + # SSL configuration. By default is off. + # List of root certificates for HTTPS server verifications + #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"] + + # Certificate for SSL client authentication + #ssl.certificate: "/etc/pki/client/cert.pem" + + # Client Certificate Key + #ssl.key: "/etc/pki/client/cert.key" + + # Optional passphrase for decrypting the Certificate Key. + #ssl.key_passphrase: '' + + # Configure cipher suites to be used for SSL connections + #ssl.cipher_suites: [] + + # Configure curve types for ECDHE based cipher suites + #ssl.curve_types: [] + + + +#================================ Logging ====================================== +# There are four options for the log output: file, stderr, syslog, eventlog +# The file output is the default. + +# Sets log level. The default log level is info. +# Available log levels are: error, warning, info, debug +#logging.level: info + +# Enable debug output for selected components. To enable all selectors use ["*"] +# Other available selectors are "beat", "publish", "service" +# Multiple selectors can be chained. +#logging.selectors: [ ] + +# Send all logging output to syslog. The default is false. +#logging.to_syslog: false + +# Send all logging output to Windows Event Logs. The default is false. +#logging.to_eventlog: false + +# If enabled, winlogbeat periodically logs its internal metrics that have changed +# in the last period. For each metric that changed, the delta from the value at +# the beginning of the period is logged. Also, the total values for +# all non-zero internal metrics are logged on shutdown. The default is true. +#logging.metrics.enabled: true + +# The period after which to log the internal metrics. The default is 30s. +#logging.metrics.period: 30s + +# Logging to rotating files. Set logging.to_files to false to disable logging to +# files. +logging.to_files: true +logging.files: + # Configure the path where the logs are written. The default is the logs directory + # under the home path (the binary location). + #path: /var/log/winlogbeat + + # The name of the files where the logs are written to. + #name: winlogbeat + + # Configure log file size limit. If limit is reached, log file will be + # automatically rotated + #rotateeverybytes: 10485760 # = 10MB + + # Number of rotated log files to keep. Oldest files will be deleted first. + #keepfiles: 7 + + # The permissions mask to apply when rotating log files. The default value is 0600. + # Must be a valid Unix-style file permissions mask expressed in octal notation. + #permissions: 0600 + + # Enable log file rotation on time intervals in addition to size-based rotation. + # Intervals must be at least 1s. Values of 1m, 1h, 24h, 7*24h, 30*24h, and 365*24h + # are boundary-aligned with minutes, hours, days, weeks, months, and years as + # reported by the local system clock. All other intervals are calculated from the + # unix epoch. Defaults to disabled. + #interval: 0 + +# Set to true to log messages in json format. +#logging.json: false + + +#============================== Xpack Monitoring ===================================== +# winlogbeat can export internal metrics to a central Elasticsearch monitoring cluster. +# This requires xpack monitoring to be enabled in Elasticsearch. +# The reporting is disabled by default. + +# Set to true to enable the monitoring reporter. +#xpack.monitoring.enabled: false + +# Uncomment to send the metrics to Elasticsearch. 
Most settings from the
+# Elasticsearch output are accepted here as well. Any setting that is not set is
+# automatically inherited from the Elasticsearch output configuration, so if you
+# have the Elasticsearch output configured, you can simply uncomment the
+# following line, and leave the rest commented out.
+#xpack.monitoring.elasticsearch:
+
+  # Array of hosts to connect to.
+  # Scheme and port can be left out and will be set to the default (http and 9200)
+  # In case you specify an additional path, the scheme is required: http://localhost:9200/path
+  # IPv6 addresses should always be defined as: https://[2001:db8::1]:9200
+  #hosts: ["localhost:9200"]
+
+  # Set gzip compression level.
+  #compression_level: 0
+
+  # Optional protocol and basic auth credentials.
+  #protocol: "https"
+  #username: "beats_system"
+  #password: "changeme"
+
+  # Dictionary of HTTP parameters to pass within the url with index operations.
+  #parameters:
+    #param1: value1
+    #param2: value2
+
+  # Custom HTTP headers to add to each request
+  #headers:
+  #  X-My-Header: Contents of the header
+
+  # Proxy server url
+  #proxy_url: http://proxy:3128
+
+  # The number of times a particular Elasticsearch index operation is attempted. If
+  # the indexing operation doesn't succeed after this many retries, the events are
+  # dropped. The default is 3.
+  #max_retries: 3
+
+  # The maximum number of events to bulk in a single Elasticsearch bulk API index request.
+  # The default is 50.
+  #bulk_max_size: 50
+
+  # The number of seconds to wait before trying to reconnect to Elasticsearch
+  # after a network error. After waiting backoff.init seconds, the Beat
+  # tries to reconnect. If the attempt fails, the backoff timer is increased
+  # exponentially up to backoff.max. After a successful connection, the backoff
+  # timer is reset. The default is 1s.
+  #backoff.init: 1s
+
+  # The maximum number of seconds to wait before attempting to connect to
+  # Elasticsearch after a network error. The default is 60s.
+  #backoff.max: 60s
+
+  # Configure the HTTP request timeout before failing a request to Elasticsearch.
+  #timeout: 90
+
+  # Use SSL settings for HTTPS.
+  #ssl.enabled: true
+
+  # Configure SSL verification mode. If `none` is configured, all server hosts
+  # and certificates will be accepted. In this mode, SSL based connections are
+  # susceptible to man-in-the-middle attacks. Use only for testing. Default is
+  # `full`.
+  #ssl.verification_mode: full
+
+  # List of supported/valid TLS versions. By default all TLS versions 1.0 up to
+  # 1.2 are enabled.
+  #ssl.supported_protocols: [TLSv1.0, TLSv1.1, TLSv1.2]
+
+  # SSL configuration. By default it is off.
+  # List of root certificates for HTTPS server verifications
+  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
+
+  # Certificate for SSL client authentication
+  #ssl.certificate: "/etc/pki/client/cert.pem"
+
+  # Client Certificate Key
+  #ssl.key: "/etc/pki/client/cert.key"
+
+  # Optional passphrase for decrypting the Certificate Key.
+  #ssl.key_passphrase: ''
+
+  # Configure cipher suites to be used for SSL connections
+  #ssl.cipher_suites: []
+
+  # Configure curve types for ECDHE based cipher suites
+  #ssl.curve_types: []
+
+  # Configure what types of renegotiation are supported. Valid options are
+  # never, once, and freely. Default is never.
+  #ssl.renegotiation: never
+
+  #metrics.period: 10s
+  #state.period: 1m
+
+#================================ HTTP Endpoint ======================================
+# Each beat can expose internal metrics through an HTTP endpoint.
For security +# reasons the endpoint is disabled by default. This feature is currently experimental. +# Stats can be access through http://localhost:5066/stats . For pretty JSON output +# append ?pretty to the URL. + +# Defines if the HTTP endpoint is enabled. +#http.enabled: false + +# The HTTP endpoint will bind to this hostname or IP address. It is recommended to use only localhost. +#http.host: localhost + +# Port on which the HTTP endpoint will bind. Default is 5066. +#http.port: 5066 + +#============================= Process Security ================================ + +# Enable or disable seccomp system call filtering on Linux. Default is enabled. +#seccomp.enabled: true diff --git a/x-pack/winlogbeat/winlogbeat.yml b/x-pack/winlogbeat/winlogbeat.yml new file mode 100644 index 00000000000..7661b770cda --- /dev/null +++ b/x-pack/winlogbeat/winlogbeat.yml @@ -0,0 +1,155 @@ +###################### Winlogbeat Configuration Example ########################## + +# This file is an example configuration file highlighting only the most common +# options. The winlogbeat.reference.yml file from the same directory contains all the +# supported options with more comments. You can use it as a reference. +# +# You can find the full configuration reference here: +# https://www.elastic.co/guide/en/beats/winlogbeat/index.html + +#======================= Winlogbeat specific options ========================== + +# event_logs specifies a list of event logs to monitor as well as any +# accompanying options. The YAML data type of event_logs is a list of +# dictionaries. +# +# The supported keys are name (required), tags, fields, fields_under_root, +# forwarded, ignore_older, level, event_id, provider, and include_xml. Please +# visit the documentation for the complete details of each option. +# https://go.es.io/WinlogbeatConfig +winlogbeat.event_logs: + - name: Application + ignore_older: 72h + - name: Security + - name: System + +#==================== Elasticsearch template setting ========================== + +setup.template.settings: + index.number_of_shards: 3 + #index.codec: best_compression + #_source.enabled: false + +#================================ General ===================================== + +# The name of the shipper that publishes the network data. It can be used to group +# all the transactions sent by a single shipper in the web interface. +#name: + +# The tags of the shipper are included in their own field with each +# transaction published. +#tags: ["service-X", "web-tier"] + +# Optional fields that you can specify to add additional information to the +# output. +#fields: +# env: staging + + +#============================== Dashboards ===================================== +# These settings control loading the sample dashboards to the Kibana index. Loading +# the dashboards is disabled by default and can be enabled either by setting the +# options here, or by using the `-setup` CLI flag or the `setup` command. +#setup.dashboards.enabled: false + +# The URL from where to download the dashboards archive. By default this URL +# has a value which is computed based on the Beat name and version. For released +# versions, this URL points to the dashboard archive on the artifacts.elastic.co +# website. +#setup.dashboards.url: + +#============================== Kibana ===================================== + +# Starting with Beats version 6.0.0, the dashboards are loaded via the Kibana API. +# This requires a Kibana endpoint configuration. 
+setup.kibana:
+
+  # Kibana Host
+  # Scheme and port can be left out and will be set to the default (http and 5601)
+  # In case you specify an additional path, the scheme is required: http://localhost:5601/path
+  # IPv6 addresses should always be defined as: https://[2001:db8::1]:5601
+  #host: "localhost:5601"
+
+  # Kibana Space ID
+  # ID of the Kibana Space into which the dashboards should be loaded. By default,
+  # the Default Space will be used.
+  #space.id:
+
+#============================= Elastic Cloud ==================================
+
+# These settings simplify using winlogbeat with the Elastic Cloud (https://cloud.elastic.co/).
+
+# The cloud.id setting overwrites the `output.elasticsearch.hosts` and
+# `setup.kibana.host` options.
+# You can find the `cloud.id` in the Elastic Cloud web UI.
+#cloud.id:
+
+# The cloud.auth setting overwrites the `output.elasticsearch.username` and
+# `output.elasticsearch.password` settings. The format is `<user>:<pass>`.
+#cloud.auth:
+
+#================================ Outputs =====================================
+
+# Configure what output to use when sending the data collected by the beat.
+
+#-------------------------- Elasticsearch output ------------------------------
+output.elasticsearch:
+  # Array of hosts to connect to.
+  hosts: ["localhost:9200"]
+
+  # Enable ILM (beta) to use index lifecycle management instead of daily indices.
+  #ilm.enabled: false
+
+  # Optional protocol and basic auth credentials.
+  #protocol: "https"
+  #username: "elastic"
+  #password: "changeme"
+
+#----------------------------- Logstash output --------------------------------
+#output.logstash:
+  # The Logstash hosts
+  #hosts: ["localhost:5044"]
+
+  # Optional SSL. By default it is off.
+  # List of root certificates for HTTPS server verifications
+  #ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
+
+  # Certificate for SSL client authentication
+  #ssl.certificate: "/etc/pki/client/cert.pem"
+
+  # Client Certificate Key
+  #ssl.key: "/etc/pki/client/cert.key"
+
+#================================ Processors =====================================
+
+# Configure processors to enhance or manipulate events generated by the beat.
+
+processors:
+  - add_host_metadata: ~
+  - add_cloud_metadata: ~
+
+#================================ Logging =====================================
+
+# Sets log level. The default log level is info.
+# Available log levels are: error, warning, info, debug
+#logging.level: debug
+
+# At debug level, you can selectively enable logging only for some components.
+# To enable all selectors use ["*"]. Examples of other selectors are "beat",
+# "publish", "service".
+#logging.selectors: ["*"]
+
+#============================== Xpack Monitoring ===============================
+# winlogbeat can export internal metrics to a central Elasticsearch monitoring
+# cluster. This requires xpack monitoring to be enabled in Elasticsearch. The
+# reporting is disabled by default.
+
+# Set to true to enable the monitoring reporter.
+#xpack.monitoring.enabled: false
+
+# Uncomment to send the metrics to Elasticsearch. Most settings from the
+# Elasticsearch output are accepted here as well. Any setting that is not set is
+# automatically inherited from the Elasticsearch output configuration, so if you
+# have the Elasticsearch output configured, you can simply uncomment the
+# following line.
+#xpack.monitoring.elasticsearch: From c26e70c1139de1ac36528d312c1513c4d4e044c0 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Tue, 1 Jan 2019 23:59:26 -0500 Subject: [PATCH 11/18] Refactor functionbeat build logic --- x-pack/functionbeat/.gitignore | 10 -- x-pack/functionbeat/Makefile | 26 +---- x-pack/functionbeat/docker-compose.yml | 2 +- x-pack/functionbeat/include/fields.go | 2 +- x-pack/functionbeat/magefile.go | 99 ++++--------------- x-pack/functionbeat/make.bat | 2 +- x-pack/functionbeat/scripts/mage/config.go | 25 +++++ x-pack/functionbeat/scripts/mage/fields.go | 45 +++++++++ x-pack/functionbeat/scripts/mage/package.go | 35 +++++++ .../packaging => scripts/mage}/packages.yml | 1 + x-pack/functionbeat/scripts/mage/update.go | 60 +++++++++++ 11 files changed, 193 insertions(+), 114 deletions(-) delete mode 100644 x-pack/functionbeat/.gitignore create mode 100644 x-pack/functionbeat/scripts/mage/config.go create mode 100644 x-pack/functionbeat/scripts/mage/fields.go create mode 100644 x-pack/functionbeat/scripts/mage/package.go rename x-pack/functionbeat/{dev-tools/packaging => scripts/mage}/packages.yml (99%) create mode 100644 x-pack/functionbeat/scripts/mage/update.go diff --git a/x-pack/functionbeat/.gitignore b/x-pack/functionbeat/.gitignore deleted file mode 100644 index e89322a2657..00000000000 --- a/x-pack/functionbeat/.gitignore +++ /dev/null @@ -1,10 +0,0 @@ -.idea -.vagrant -.vscode -/*/_meta/kibana.generated -functionbeat -functionbeat.test -build -data -logs -./fields.yml diff --git a/x-pack/functionbeat/Makefile b/x-pack/functionbeat/Makefile index 876f451476c..7427a5c672b 100644 --- a/x-pack/functionbeat/Makefile +++ b/x-pack/functionbeat/Makefile @@ -1,22 +1,4 @@ -BEAT_NAME?=functionbeat -LICENSE=Elastic -BEAT_TITLE?=Functionbeat -SYSTEM_TESTS?=true -BEAT_PATH?=github.com/elastic/beats/x-pack/${BEAT_NAME} -TEST_ENVIRONMENT?=true -GOX_FLAGS=-arch="amd64 386 arm ppc64 ppc64le" -ES_BEATS?=../../ -FIELDS_FILE_PATH=module -XPACK_ONLY?=true - -# Path to the libbeat Makefile -include $(ES_BEATS)/libbeat/scripts/Makefile - -# Runs all collection steps and updates afterwards -.PHONY: collect -collect: - -# Generate an artifact to be push on serverless provider. 
-.PHONY: linux -linux: - GOOS=linux go build -o pkg/functionbeat +# +# Includes +# +include ../../dev-tools/make/xpack.mk diff --git a/x-pack/functionbeat/docker-compose.yml b/x-pack/functionbeat/docker-compose.yml index b60040f7dee..282af8f094c 100644 --- a/x-pack/functionbeat/docker-compose.yml +++ b/x-pack/functionbeat/docker-compose.yml @@ -5,7 +5,7 @@ services: depends_on: - proxy_dep env_file: - - ${PWD}/build/test.env + - ${PWD}/../../testing/environments/test.env working_dir: /go/src/github.com/elastic/beats/x-pack/functionbeat volumes: - ${PWD}/../..:/go/src/github.com/elastic/beats/ diff --git a/x-pack/functionbeat/include/fields.go b/x-pack/functionbeat/include/fields.go index 5159fd56bd8..037f4ec9dd5 100644 --- a/x-pack/functionbeat/include/fields.go +++ b/x-pack/functionbeat/include/fields.go @@ -11,7 +11,7 @@ import ( ) func init() { - if err := asset.SetFields("functionbeat", "fields.yml", asset.BeatFieldsPri, Asset); err != nil { + if err := asset.SetFields("functionbeat", "build/fields/fields.common.yml", asset.BeatFieldsPri, Asset); err != nil { panic(err) } } diff --git a/x-pack/functionbeat/magefile.go b/x-pack/functionbeat/magefile.go index 12a9597bf28..f3418b6cab5 100644 --- a/x-pack/functionbeat/magefile.go +++ b/x-pack/functionbeat/magefile.go @@ -7,91 +7,32 @@ package main import ( - "context" - "fmt" - "time" - "github.com/magefile/mage/mg" - "github.com/magefile/mage/sh" "github.com/elastic/beats/dev-tools/mage" + + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/common" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/build" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/pkg" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/dashboards" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/test" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/unittest" + // mage:import + _ "github.com/elastic/beats/dev-tools/mage/target/integtest" + // mage:import + functionbeat "github.com/elastic/beats/x-pack/functionbeat/scripts/mage" ) func init() { - mage.BeatDescription = "Functionbeat is a beat implementation for a serverless architecture." mage.BeatLicense = "Elastic License" } -// Build builds the Beat binary. -func Build() error { - return mage.Build(mage.DefaultBuildArgs()) -} - -// GolangCrossBuild build the Beat binary inside of the golang-builder. -// Do not use directly, use crossBuild instead. -func GolangCrossBuild() error { - return mage.GolangCrossBuild(mage.DefaultGolangCrossBuildArgs()) -} - -// BuildGoDaemon builds the go-daemon binary (use crossBuildGoDaemon). -func BuildGoDaemon() error { - return mage.BuildGoDaemon() -} - -// CrossBuild cross-builds the beat for all target platforms. -func CrossBuild() error { - return mage.CrossBuild(mage.AddPlatforms("linux/amd64")) -} - -// CrossBuildGoDaemon cross-builds the go-daemon binary using Docker. -func CrossBuildGoDaemon() error { - return mage.CrossBuildGoDaemon() -} - -// Clean cleans all generated files and build artifacts. -func Clean() error { - return mage.Clean() -} - -// Package packages the Beat for distribution. -// Use SNAPSHOT=true to build snapshots. -// Use PLATFORMS to control the target platforms. 
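The refactored magefile.go above pulls its targets in through mage's `// mage:import` comments: mage merges the exported target functions of each imported package into this project's target list, so the per-Beat magefile shrinks to a set of imports plus project-specific settings. The shared packages themselves are added elsewhere in this series and are not shown here; as a rough, hypothetical sketch, such a package only needs to export ordinary mage target functions:

// Hypothetical sketch of a shared target package in the style of
// dev-tools/mage/target/*; the real files added by this series may differ.
package common

import "github.com/magefile/mage/mg"

// Fmt formats code and adds license headers (placeholder body).
func Fmt() error { return nil }

// Check runs the shared checks, declaring Fmt as a dependency.
func Check() { mg.Deps(Fmt) }

Project-specific dependencies are then hooked into the shared targets through helpers such as common.RegisterCheckDeps, as the functionbeat update.go later in this patch does.
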
-func Package() { - start := time.Now() - defer func() { fmt.Println("package ran for", time.Since(start)) }() - - mage.MustUsePackaging("functionbeat", "x-pack/functionbeat/dev-tools/packaging/packages.yml") - - mg.Deps(Update) - mg.Deps(CrossBuild, CrossBuildGoDaemon) - mg.SerialDeps(mage.Package, TestPackages) -} - -// TestPackages tests the generated packages (i.e. file modes, owners, groups). -func TestPackages() error { - return mage.TestPackages() -} - -// Update updates the generated files (aka make update). -func Update() error { - return sh.Run("make", "update") -} - -// Fields generates a fields.yml for the Beat. -func Fields() error { - return mage.GenerateFieldsYAML() -} - -// GoTestUnit executes the Go unit tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestUnit(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestUnitArgs()) -} - -// GoTestIntegration executes the Go integration tests. -// Use TEST_COVERAGE=true to enable code coverage profiling. -// Use RACE_DETECTOR=true to enable the race detector. -func GoTestIntegration(ctx context.Context) error { - return mage.GoTest(ctx, mage.DefaultGoTestIntegrationArgs()) -} +// Update is an alias for update:all. This is a workaround for +// https://github.com/magefile/mage/issues/217. +func Update() { mg.Deps(functionbeat.Update.All) } diff --git a/x-pack/functionbeat/make.bat b/x-pack/functionbeat/make.bat index 9fe6b2b801e..81de1ba946f 100644 --- a/x-pack/functionbeat/make.bat +++ b/x-pack/functionbeat/make.bat @@ -6,6 +6,6 @@ REM REM After running this once you may invoke mage.exe directly. WHERE mage -IF %ERRORLEVEL% NEQ 0 go install github.com/ph/functionbeat/vendor/github.com/magefile/mage +IF %ERRORLEVEL% NEQ 0 go install github.com/elastic/beats/vendor/github.com/magefile/mage mage %* diff --git a/x-pack/functionbeat/scripts/mage/config.go b/x-pack/functionbeat/scripts/mage/config.go new file mode 100644 index 00000000000..d3c21fc5839 --- /dev/null +++ b/x-pack/functionbeat/scripts/mage/config.go @@ -0,0 +1,25 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package mage + +import "github.com/elastic/beats/dev-tools/mage" + +// config generates short/reference configs. +func config() error { + return mage.Config(mage.ShortConfigType|mage.ReferenceConfigType, configFileParams(), ".") +} + +func configFileParams() mage.ConfigFileParams { + return mage.ConfigFileParams{ + ShortParts: []string{ + mage.OSSBeatDir("_meta/beat.yml"), + mage.LibbeatDir("_meta/config.yml"), + }, + ReferenceParts: []string{ + mage.OSSBeatDir("_meta/beat.reference.yml"), + mage.LibbeatDir("_meta/config.reference.yml"), + }, + } +} diff --git a/x-pack/functionbeat/scripts/mage/fields.go b/x-pack/functionbeat/scripts/mage/fields.go new file mode 100644 index 00000000000..259f9921e95 --- /dev/null +++ b/x-pack/functionbeat/scripts/mage/fields.go @@ -0,0 +1,45 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
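The config.go helper above only lists the config parts; mage.Config assembles functionbeat's short and reference configs from them. As a rough mental model only (the paths are assumptions, and the real helper also applies whatever template expansion and permission handling it needs), generating the short config amounts to concatenating the Beat-specific part with the shared libbeat part:

// Hypothetical illustration; the real generation is done by mage.Config with
// the parts listed in configFileParams above.
package main

import (
	"io"
	"os"
)

func main() {
	parts := []string{
		"_meta/beat.yml",                 // Beat-specific settings (assumed path)
		"../../libbeat/_meta/config.yml", // shared libbeat settings (assumed path)
	}
	out, err := os.Create("functionbeat.yml")
	if err != nil {
		panic(err)
	}
	defer out.Close()
	for _, part := range parts {
		in, err := os.Open(part)
		if err != nil {
			panic(err)
		}
		if _, err := io.Copy(out, in); err != nil {
			in.Close()
			panic(err)
		}
		in.Close()
	}
}
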
+ +package mage + +import ( + "os" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" +) + +var fb fieldsBuilder + +var _ mage.FieldsBuilder = fb + +type fieldsBuilder struct{} + +func (b fieldsBuilder) All() { + mg.Deps(b.FieldsGo, b.FieldsYML, b.FieldsAllYML) +} + +func (b fieldsBuilder) FieldsGo() error { return b.commonFieldsGo() } + +func (b fieldsBuilder) FieldsYML() error { + if err := mage.GenerateFieldsYAMLTo(mage.FieldsYML); err != nil { + return err + } + return mage.Copy(mage.FieldsYML, mage.FieldsYMLRoot) +} + +func (b fieldsBuilder) FieldsAllYML() error { + return mage.GenerateFieldsYAMLTo(mage.FieldsAllYML) +} + +func (b fieldsBuilder) commonFieldsGo() error { + const file = "build/fields/fields.common.yml" + if err := mage.GenerateFieldsYAMLTo(file); err != nil { + return err + } + defer os.Remove(file) + return mage.GenerateFieldsGo(file, "include/fields.go") +} diff --git a/x-pack/functionbeat/scripts/mage/package.go b/x-pack/functionbeat/scripts/mage/package.go new file mode 100644 index 00000000000..62bb7aed4f2 --- /dev/null +++ b/x-pack/functionbeat/scripts/mage/package.go @@ -0,0 +1,35 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. + +package mage + +import ( + "fmt" + "time" + + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage" + "github.com/elastic/beats/dev-tools/mage/target/build" + "github.com/elastic/beats/dev-tools/mage/target/pkg" +) + +func init() { + mage.BeatDescription = "Functionbeat is a beat implementation for a serverless architecture." +} + +// Package packages the Beat for distribution. +// Use SNAPSHOT=true to build snapshots. +// Use PLATFORMS to control the target platforms. +// Use VERSION_QUALIFIER to control the version qualifier. +func Package() { + start := time.Now() + defer func() { fmt.Println("package ran for", time.Since(start)) }() + + mage.MustUsePackaging(mage.BeatName, mage.XPackBeatDir("scripts/mage/packages.yml")) + + mg.Deps(Update.All) + mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.SerialDeps(mage.Package, pkg.PackageTest) +} diff --git a/x-pack/functionbeat/dev-tools/packaging/packages.yml b/x-pack/functionbeat/scripts/mage/packages.yml similarity index 99% rename from x-pack/functionbeat/dev-tools/packaging/packages.yml rename to x-pack/functionbeat/scripts/mage/packages.yml index 2a844056262..26e9781fd5a 100644 --- a/x-pack/functionbeat/dev-tools/packaging/packages.yml +++ b/x-pack/functionbeat/scripts/mage/packages.yml @@ -68,6 +68,7 @@ shared: pkg/functionbeat: source: 'build/golang-crossbuild/{{.BeatName}}-linux-amd64' mode: 0755 + # specs is a list of named packaging "flavors". specs: functionbeat: diff --git a/x-pack/functionbeat/scripts/mage/update.go b/x-pack/functionbeat/scripts/mage/update.go new file mode 100644 index 00000000000..afc8e9b1a5f --- /dev/null +++ b/x-pack/functionbeat/scripts/mage/update.go @@ -0,0 +1,60 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License; +// you may not use this file except in compliance with the Elastic License. 
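The Package target above relies on mage's dependency functions for ordering: each mg.Deps call runs its arguments concurrently (every target runs at most once per invocation) and blocks until they finish, while mg.SerialDeps runs its arguments strictly in order. A small, self-contained magefile sketch with the same phase structure (target names are invented for the example):

// +build mage

package main

import (
	"fmt"

	"github.com/magefile/mage/mg"
)

func Generate() { fmt.Println("update generated files") }
func BuildA()   { fmt.Println("cross-build the Beat") } // may run in parallel with BuildB
func BuildB()   { fmt.Println("cross-build go-daemon") }
func Archive()  { fmt.Println("create distribution packages") }
func Verify()   { fmt.Println("test the packages") }

// Release mirrors Package above: Generate runs first, then BuildA and BuildB
// (possibly in parallel), then Archive and Verify strictly in that order.
func Release() {
	mg.Deps(Generate)
	mg.Deps(BuildA, BuildB)
	mg.SerialDeps(Archive, Verify)
}
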
+ +package mage + +import ( + "github.com/magefile/mage/mg" + + "github.com/elastic/beats/dev-tools/mage/target/common" + "github.com/elastic/beats/dev-tools/mage/target/dashboards" + "github.com/elastic/beats/dev-tools/mage/target/docs" + + "github.com/elastic/beats/dev-tools/mage/target/build" + + "github.com/elastic/beats/dev-tools/mage" +) + +func init() { + common.RegisterCheckDeps(Update.All) + + dashboards.RegisterImportDeps(build.Build, Update.Dashboards) + + docs.RegisterDeps(Update.FieldDocs) +} + +var ( + // SelectLogic configures the types of project logic to use (OSS vs X-Pack). + SelectLogic mage.ProjectType +) + +// Update target namespace. +type Update mg.Namespace + +// All updates all generated content. +func (Update) All() { + mg.Deps(Update.Fields, Update.Dashboards, Update.Config, Update.FieldDocs) +} + +// Config updates the Beat's config files. +func (Update) Config() error { + return config() +} + +// Dashboards collects all the dashboards and generates index patterns. +func (Update) Dashboards() error { + mg.Deps(fb.FieldsYML) + return mage.KibanaDashboards() +} + +// Fields updates all fields files (.go, .yml). +func (Update) Fields() { + mg.Deps(fb.All) +} + +// FieldDocs updates the field documentation. +func (Update) FieldDocs() error { + mg.Deps(fb.FieldsAllYML) + return mage.Docs.FieldDocs(mage.FieldsAllYML) +} From c3a6acb2501a724f7b73d49b60a4c02a90bd7454 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 04:22:34 -0500 Subject: [PATCH 12/18] Fix haproxy module expected log --- filebeat/module/haproxy/log/test/default.log-expected.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/filebeat/module/haproxy/log/test/default.log-expected.json b/filebeat/module/haproxy/log/test/default.log-expected.json index 203b6b16c71..5cd558d3b05 100644 --- a/filebeat/module/haproxy/log/test/default.log-expected.json +++ b/filebeat/module/haproxy/log/test/default.log-expected.json @@ -1,6 +1,6 @@ [ { - "@timestamp": "2018-09-20T15:42:59.000Z", + "@timestamp": "2019-09-20T15:42:59.000Z", "destination.ip": "1.2.3.4", "destination.port": 5000, "ecs.version": "1.0.0-beta2", From 4092ddd7313dc2c737dd14298b6e784023798522 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 11:24:47 -0500 Subject: [PATCH 13/18] Fix targets in jenkins_ci.ps1 --- dev-tools/jenkins_ci.ps1 | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/dev-tools/jenkins_ci.ps1 b/dev-tools/jenkins_ci.ps1 index f768d93e8b7..14d9c469c63 100755 --- a/dev-tools/jenkins_ci.ps1 +++ b/dev-tools/jenkins_ci.ps1 @@ -48,11 +48,5 @@ New-Item -ItemType directory -Path build\coverage | Out-Null New-Item -ItemType directory -Path build\system-tests | Out-Null New-Item -ItemType directory -Path build\system-tests\run | Out-Null -echo "Building fields.yml" -exec { mage fields } "mage fields FAILURE" - -echo "Building $env:beat" -exec { mage build } "Build FAILURE" - -echo "Unit testing $env:beat" -exec { mage unitTest } "mage unitTest FAILURE" +echo "Updating/Building/Testing $env:beat" +exec { mage update build unitTest } "mage update build unitTest FAILURE" From 35a95fdd781bb2ccb6678da7030154c8b29ee724 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 12:52:41 -0500 Subject: [PATCH 14/18] Debug Filebeat on Jenkins --- dev-tools/jenkins_ci.sh | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/dev-tools/jenkins_ci.sh b/dev-tools/jenkins_ci.sh index 437118a666b..eae34d35e39 100755 --- a/dev-tools/jenkins_ci.sh +++ b/dev-tools/jenkins_ci.sh @@ 
-36,5 +36,11 @@ trap cleanup EXIT rm -rf ${GOPATH}/pkg cd ${beat} + +MAGEFILE_VERBOSE=0 +if [ "$beat" == "filebeat" ]; then + # Temporarily enable debug for Filebeat since Jenkins is not archiving logs. + export MAGEFILE_VERBOSE=1 +fi make mage RACE_DETECTOR=1 mage clean check build test From 56a0cda48463329e6544c4cdeb079d32402a662d Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 12:53:27 -0500 Subject: [PATCH 15/18] Fix journalbeat package target deps --- journalbeat/scripts/mage/package.go | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/journalbeat/scripts/mage/package.go b/journalbeat/scripts/mage/package.go index 212909f39dc..ec1baf0557d 100644 --- a/journalbeat/scripts/mage/package.go +++ b/journalbeat/scripts/mage/package.go @@ -24,7 +24,6 @@ import ( "github.com/magefile/mage/mg" "github.com/elastic/beats/dev-tools/mage" - "github.com/elastic/beats/dev-tools/mage/target/build" "github.com/elastic/beats/dev-tools/mage/target/pkg" ) @@ -51,6 +50,6 @@ func Package() { mage.PackageKibanaDashboardsFromBuildDir() mg.Deps(Update.All) - mg.Deps(build.CrossBuild, build.CrossBuildGoDaemon) + mg.Deps(CrossBuild, CrossBuildGoDaemon) mg.SerialDeps(mage.Package, pkg.PackageTest) } From ad377cad95c144ca759cc513c429e14695669309 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 13:03:48 -0500 Subject: [PATCH 16/18] CreateDir for the Docker image file --- dev-tools/mage/dockerbuilder.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/dev-tools/mage/dockerbuilder.go b/dev-tools/mage/dockerbuilder.go index 44791010217..78bcf6a0a6c 100644 --- a/dev-tools/mage/dockerbuilder.go +++ b/dev-tools/mage/dockerbuilder.go @@ -163,7 +163,7 @@ func (b *dockerBuilder) dockerSave(tag string) error { } err = func() error { - f, err := os.Create(outputFile) + f, err := os.Create(CreateDir(outputFile)) if err != nil { return err } From fe8324495d0c7fc7e93093be7747ea1d8bb5fe8f Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 15:08:13 -0500 Subject: [PATCH 17/18] Enable debug logging for the metricbeat build --- dev-tools/jenkins_ci.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev-tools/jenkins_ci.sh b/dev-tools/jenkins_ci.sh index eae34d35e39..1433157c656 100755 --- a/dev-tools/jenkins_ci.sh +++ b/dev-tools/jenkins_ci.sh @@ -38,8 +38,8 @@ rm -rf ${GOPATH}/pkg cd ${beat} MAGEFILE_VERBOSE=0 -if [ "$beat" == "filebeat" ]; then - # Temporarily enable debug for Filebeat since Jenkins is not archiving logs. +if [ "$beat" == "metricbeat" ]; then + # Temporarily enable debug for Metricbeat since Jenkins is not archiving logs. export MAGEFILE_VERBOSE=1 fi make mage From e3ded462e6678712281e508e7d173ea66e797a46 Mon Sep 17 00:00:00 2001 From: Andrew Kroh Date: Wed, 2 Jan 2019 15:24:39 -0500 Subject: [PATCH 18/18] Fix metricbeat packaging module glob --- metricbeat/scripts/mage/package.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/metricbeat/scripts/mage/package.go b/metricbeat/scripts/mage/package.go index a26872097f0..97542b4fa4c 100644 --- a/metricbeat/scripts/mage/package.go +++ b/metricbeat/scripts/mage/package.go @@ -67,7 +67,7 @@ func Package() { // And for Windows it comments out the system/load metricset because it's // not supported. 
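The dockerbuilder change in PATCH 16 above wraps the output path in CreateDir before calling os.Create, because os.Create fails when the parent directory of the Docker image tarball does not exist yet. The helper's implementation is not part of that hunk (the unqualified call implies it lives elsewhere in the dev-tools/mage package); assuming the usual contract of "ensure the parent directory exists, then return the path unchanged", it behaves roughly like this sketch (name and error handling are assumptions, not copied from the repo):

// Rough sketch only; the real helper's name, signature, and error handling may differ.
package mage

import (
	"os"
	"path/filepath"
)

// ensureParentDir creates the parent directory of file (if needed) and returns
// file unchanged so it can be used inline, e.g. os.Create(ensureParentDir(path)).
func ensureParentDir(file string) string {
	if err := os.MkdirAll(filepath.Dir(file), 0755); err != nil {
		panic(err) // illustration; real code would surface the error properly
	}
	return file
}
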
func customizePackaging() { - const shortConfigGlob = "modules/*/_meta/config.yml" + const shortConfigGlob = "module/*/_meta/config.yml" inputGlobs := []string{mage.OSSBeatDir(shortConfigGlob)} if mage.BeatProjectType == mage.XPackProject { inputGlobs = append(inputGlobs, mage.XPackBeatDir(shortConfigGlob))
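The metricbeat glob fix above matters because the per-module short configs live under metricbeat/module/<name>/_meta/config.yml; with the old modules/* pattern the glob would have matched nothing, so no module configs would have been collected during packaging. A quick, hypothetical way to compare the two patterns from the repository root:

// Hypothetical check, run from the root of the beats repository.
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	oldMatches, _ := filepath.Glob("metricbeat/modules/*/_meta/config.yml") // old pattern
	newMatches, _ := filepath.Glob("metricbeat/module/*/_meta/config.yml")  // fixed pattern
	fmt.Printf("old pattern: %d matches, fixed pattern: %d matches\n",
		len(oldMatches), len(newMatches))
}
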