Merge branch 'main' into fork_stats_work
droberts195 committed Mar 19, 2024
2 parents c6e39dc + 43b8ca0 commit 1e92cfe
Showing 12,263 changed files with 524,193 additions and 210,940 deletions.
The full diff is too large to display; only the first 3000 changed files are loaded.
4 changes: 2 additions & 2 deletions .backportrc.json
@@ -1,9 +1,9 @@
 {
   "upstream" : "elastic/elasticsearch",
-  "targetBranchChoices" : [ "main", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
+  "targetBranchChoices" : [ "main", "8.13", "8.12", "8.11", "8.10", "8.9", "8.8", "8.7", "8.6", "8.5", "8.4", "8.3", "8.2", "8.1", "8.0", "7.17", "6.8" ],
   "targetPRLabels" : [ "backport" ],
   "branchLabelMapping" : {
-    "^v8.11.0$" : "main",
+    "^v8.14.0$" : "main",
     "^v(\\d+).(\\d+).\\d+(?:-(?:alpha|beta|rc)\\d+)?$" : "$1.$2"
   }
 }
2 changes: 2 additions & 0 deletions .buildkite/.editorconfig
@@ -0,0 +1,2 @@
[*.ts]
max_line_length = 120
169 changes: 169 additions & 0 deletions .buildkite/.gitignore
@@ -0,0 +1,169 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore

# Logs

logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*

# Diagnostic reports (https://nodejs.org/api/report.html)

report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json

# Runtime data

pids
*.pid
*.seed
*.pid.lock

# Directory for instrumented libs generated by jscoverage/JSCover

lib-cov

# Coverage directory used by tools like istanbul

coverage
*.lcov

# nyc test coverage

.nyc_output

# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)

.grunt

# Bower dependency directory (https://bower.io/)

bower_components

# node-waf configuration

.lock-wscript

# Compiled binary addons (https://nodejs.org/api/addons.html)

build/Release

# Dependency directories

node_modules/
jspm_packages/

# Snowpack dependency directory (https://snowpack.dev/)

web_modules/

# TypeScript cache

*.tsbuildinfo

# Optional npm cache directory

.npm

# Optional eslint cache

.eslintcache

# Optional stylelint cache

.stylelintcache

# Microbundle cache

.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/

# Optional REPL history

.node_repl_history

# Output of 'npm pack'

*.tgz

# Yarn Integrity file

.yarn-integrity

# dotenv environment variable files

.env
.env.development.local
.env.test.local
.env.production.local
.env.local

# parcel-bundler cache (https://parceljs.org/)

.cache
.parcel-cache

# Next.js build output

.next
out

# Nuxt.js build / generate output

.nuxt
dist

# Gatsby files

.cache/

# Comment in the public line in if your project uses Gatsby and not Next.js

# https://nextjs.org/blog/next-9-1#public-directory-support

# public

# vuepress build output

.vuepress/dist

# vuepress v2.x temp and cache directory

.temp
.cache

# Docusaurus cache and generated files

.docusaurus

# Serverless directories

.serverless/

# FuseBox cache

.fusebox/

# DynamoDB Local files

.dynamodb/

# TernJS port file

.tern-port

# Stores VSCode versions used for testing VSCode extensions

.vscode-test

# yarn v2

.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
41 changes: 41 additions & 0 deletions .buildkite/README.md
@@ -0,0 +1,41 @@
# Elasticsearch CI Pipelines

This directory contains pipeline definitions and scripts for running Elasticsearch CI on Buildkite.

## Directory Structure

- [pipelines](pipelines/) - pipeline definitions/yml
- [scripts](scripts/) - scripts used by pipelines, inside steps
- [hooks](hooks/) - [Buildkite hooks](https://buildkite.com/docs/agent/v3/hooks), where global env vars and secrets are set

## Pipeline Definitions

Pipelines are defined using YAML files residing in [pipelines](pipelines/). These are mostly static definitions that are used as-is, but there are a few dynamically-generated exceptions (see below).

### Dynamically Generated Pipelines

Pull request pipelines are generated dynamically based on labels, files changed, and other properties of pull requests.

Non-pull request pipelines that include BWC version matrices must also be generated whenever the [list of BWC versions](../.ci/bwcVersions) is updated.
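
For orientation, that list is a small YAML file naming the versions to test against. A simplified illustration of its shape (the version numbers below are placeholders, not the actual contents at any point in time):

```yaml
# Simplified, illustrative shape of ../.ci/bwcVersions -- versions are placeholders
BWC_VERSION:
  - "7.17.19"
  - "8.12.2"
  - "8.13.0"
```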

#### Pull Request Pipelines

Pull request pipelines are generated dynamically at CI time based on numerous properties of the pull request. See [scripts/pull-request](scripts/pull-request) for details.
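
As a rough, hypothetical sketch of the idea (not the actual `scripts/pull-request` implementation, whose inputs, fragment names, and labels differ), generation amounts to filtering candidate pipeline fragments against the pull request's labels and changed files:

```ts
// Hypothetical sketch: pick pipeline fragments based on PR labels and changed files.
// The real logic lives in scripts/pull-request and handles many more inputs.

interface PipelineFragment {
  name: string;
  // Run only when one of these labels is present (empty = always eligible).
  requiredLabels: string[];
  // Skip when every changed file matches one of these prefixes (e.g. docs-only PRs).
  skipOnPathPrefixes: string[];
}

function selectFragments(
  fragments: PipelineFragment[],
  prLabels: string[],
  changedFiles: string[],
): PipelineFragment[] {
  return fragments.filter((fragment) => {
    const labelOk =
      fragment.requiredLabels.length === 0 ||
      fragment.requiredLabels.some((label) => prLabels.includes(label));
    const skippable =
      fragment.skipOnPathPrefixes.length > 0 &&
      changedFiles.every((file) =>
        fragment.skipOnPathPrefixes.some((prefix) => file.startsWith(prefix)),
      );
    return labelOk && !skippable;
  });
}

// Example usage with made-up fragment names and labels:
const selected = selectFragments(
  [
    { name: "part1", requiredLabels: [], skipOnPathPrefixes: ["docs/"] },
    { name: "bwc-snapshots", requiredLabels: ["test-full-bwc"], skipOnPathPrefixes: [] },
  ],
  ["test-full-bwc"],
  ["server/src/main/java/Example.java"],
);
console.log(selected.map((f) => f.name)); // [ "part1", "bwc-snapshots" ]
```

The surviving fragments would then be serialized into the YAML pipeline that Buildkite ultimately runs.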

#### BWC Version Matrices

For pipelines that include BWC version matrices, you will see one or more template files (e.g. [periodic.template.yml](pipelines/periodic.template.yml)) and a corresponding generated file (e.g. [periodic.yml](pipelines/periodic.yml)). The generated file is the one that is actually used by Buildkite.

These files are updated by running:

```bash
./gradlew updateCIBwcVersions
```

This also runs automatically during release procedures.

You should always make changes to the template files, and run the above command to update the generated files.

## Node / TypeScript

Node (technically `bun`), TypeScript, and related files are currently used to generate pipelines for pull request CI. See [scripts/pull-request](scripts/pull-request) for details.
Binary file added .buildkite/bun.lockb
Binary file not shown.
2 changes: 1 addition & 1 deletion .buildkite/check-es-serverless.yml
@@ -1,5 +1,5 @@
 steps:
-  - trigger: elasticsearch-serverless-update-submodule
+  - trigger: elasticsearch-serverless-validate-submodule
     label: ":elasticsearch: Check elasticsearch changes against serverless"
     build:
       message: "Validate latest elasticsearch changes"
33 changes: 32 additions & 1 deletion .buildkite/hooks/pre-command
@@ -48,7 +48,7 @@ BUILDKITE_API_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/bu
 export BUILDKITE_API_TOKEN

 if [[ "${USE_LUCENE_SNAPSHOT_CREDS:-}" == "true" ]]; then
-  data=$(.buildkite/scripts/lucene-snapshot/get-credentials.sh)
+  data=$(.buildkite/scripts/get-legacy-secret.sh aws-elastic/creds/lucene-snapshots)

   AWS_ACCESS_KEY_ID=$(echo "$data" | jq -r .data.access_key)
   export AWS_ACCESS_KEY_ID
@@ -70,7 +70,38 @@ if [[ "${USE_DRA_CREDENTIALS:-}" == "true" ]]; then
  export DRA_VAULT_ADDR
fi

source .buildkite/scripts/third-party-test-credentials.sh

if [[ "${USE_SNYK_CREDENTIALS:-}" == "true" ]]; then
  SNYK_TOKEN=$(vault read -field=token secret/ci/elastic-elasticsearch/migrated/snyk)
  export SNYK_TOKEN
fi

if [[ "${USE_PROD_DOCKER_CREDENTIALS:-}" == "true" ]]; then
  DOCKER_REGISTRY_USERNAME="$(vault read -field=username secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
  export DOCKER_REGISTRY_USERNAME

  DOCKER_REGISTRY_PASSWORD="$(vault read -field=password secret/ci/elastic-elasticsearch/migrated/prod_docker_registry_credentials)"
  export DOCKER_REGISTRY_PASSWORD
fi

if [[ "$BUILDKITE_AGENT_META_DATA_PROVIDER" != *"k8s"* ]]; then
  # Run in the background, while the job continues
  nohup .buildkite/scripts/setup-monitoring.sh </dev/null >/dev/null 2>&1 &
fi

# Initialize the build scan and gobld annotations with empty/open <details> tags
# This ensures that they are collapsible when they get appended to
if [[ "${BUILDKITE_LABEL:-}" == *"Pipeline upload"* || "${BUILDKITE_LABEL:-}" == *"Upload Pipeline"* ]]; then
  cat << EOF | buildkite-agent annotate --context "gradle-build-scans" --style "info"
<details>
<summary>Gradle build scan links</summary>
EOF

  cat << EOF | buildkite-agent annotate --context "ctx-gobld-metrics" --style "info"
<details>
<summary>Agent information from gobld</summary>
EOF
fi
12 changes: 12 additions & 0 deletions .buildkite/package.json
@@ -0,0 +1,12 @@
{
  "name": "buildkite-pipelines",
  "type": "module",
  "devDependencies": {
    "@types/node": "^20.6.0",
    "bun-types": "latest",
    "yaml": "^2.3.2"
  },
  "peerDependencies": {
    "typescript": "^5.0.0"
  }
}
34 changes: 34 additions & 0 deletions .buildkite/packer_cache.sh
@@ -0,0 +1,34 @@
#!/bin/bash

ROOT_DIR=$(cd "$(dirname "$0")/.." && pwd)

branches=($(cat "$ROOT_DIR/branches.json" | jq -r '.branches[].branch'))
for branch in "${branches[@]}"; do
  echo "Resolving dependencies for ${branch} branch"
  rm -rf "checkout/$branch"
  git clone /opt/git-mirrors/elastic-elasticsearch --branch "$branch" --single-branch "checkout/$branch"

  CHECKOUT_DIR=$(cd "./checkout/${branch}" && pwd)
  CI_DIR="$CHECKOUT_DIR/.ci"

  if [ "$(uname -m)" = "arm64" ] || [ "$(uname -m)" = "aarch64" ]; then
    ## On ARM we use a different properties file for setting java home
    ## Also, we don't bother attempting to resolve dependencies for the 6.8 branch
    source "$CI_DIR/java-versions-aarch64.properties"
    export JAVA16_HOME="$HOME/.java/jdk16"
  else
    source "$CI_DIR/java-versions.properties"
    ## We are caching BWC versions too, need these so we can build those
    export JAVA8_HOME="$HOME/.java/java8"
    export JAVA11_HOME="$HOME/.java/java11"
    export JAVA12_HOME="$HOME/.java/openjdk12"
    export JAVA13_HOME="$HOME/.java/openjdk13"
    export JAVA14_HOME="$HOME/.java/openjdk14"
    export JAVA15_HOME="$HOME/.java/openjdk15"
    export JAVA16_HOME="$HOME/.java/openjdk16"
  fi

  export JAVA_HOME="$HOME/.java/$ES_BUILD_JAVA"
  "checkout/${branch}/gradlew" --project-dir "$CHECKOUT_DIR" --parallel -s resolveAllDependencies -Dorg.gradle.warning.mode=none -DisCI
  rm -rf "checkout/${branch}"
done
10 changes: 10 additions & 0 deletions .buildkite/pipelines/dra-workflow.yml
@@ -7,3 +7,13 @@ steps:
      image: family/elasticsearch-ubuntu-2204
      machineType: custom-32-98304
      buildDirectory: /dev/shm/bk
  - wait
  # The hadoop build depends on the ES artifact
  # So let's trigger the hadoop build any time we build a new staging artifact
  - trigger: elasticsearch-hadoop-dra-workflow
    async: true
    build:
      branch: "${BUILDKITE_BRANCH}"
      env:
        DRA_WORKFLOW: staging
    if: build.env('DRA_WORKFLOW') == 'staging'
2 changes: 2 additions & 0 deletions .buildkite/pipelines/ecs-dynamic-template-tests.yml
@@ -10,5 +10,7 @@ steps:
notify:
  - slack: "#es-delivery"
    if: build.state == "failed"
  - slack: "#es-data-management"
    if: build.state == "failed"
  - email: "logs-plus@elastic.co"
    if: build.state == "failed"
14 changes: 11 additions & 3 deletions .buildkite/pipelines/intake.template.yml
@@ -14,23 +14,31 @@ steps:
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
   - label: part2
     command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart2
     timeout_in_minutes: 300
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
   - label: part3
     command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart3
     timeout_in_minutes: 300
     agents:
       provider: gcp
       image: family/elasticsearch-ubuntu-2004
-      machineType: custom-32-98304
+      machineType: n1-standard-32
       buildDirectory: /dev/shm/bk
+  - label: part4
+    command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dorg.elasticsearch.build.cache.push=true -Dignore.tests.seed -Dscan.capture-task-input-files checkPart4
+    timeout_in_minutes: 300
+    agents:
+      provider: gcp
+      image: family/elasticsearch-ubuntu-2004
+      machineType: n1-standard-32
+      buildDirectory: /dev/shm/bk
   - group: bwc-snapshots
     steps:
