Commit

Merge remote-tracking branch 'origin/bugfix/ZENKO-4898' into w/2.7/bugfix/ZENKO-4898

williamlardier committed Sep 26, 2024
2 parents 41a23e8 + 7ab8dce commit b5626b4
Showing 15 changed files with 322 additions and 108 deletions.
18 changes: 18 additions & 0 deletions .github/actions/archive-artifacts/action.yaml
@@ -42,3 +42,21 @@ runs:
sh -c "kubectl exec -i -n ${NAMESPACE} kcat -- \
kcat -L -b ${KAFKA_SERVICE} -t {} -C -o beginning -e -q -J \
> /tmp/artifacts/data/${STAGE}/kafka-messages-{}.log"
- name: Dump MongoDB
shell: bash
continue-on-error: true
run: |-
set -exu
ZENKO_MONGODB_DATABASE="${ZENKO_MONGODB_DATABASE:-zenko-database}"
MONGODB_ROOT_USERNAME="${MONGODB_ROOT_USERNAME:-root}"
MONGODB_ROOT_PASSWORD="${MONGODB_ROOT_PASSWORD:-rootpass}"
NAMESPACE="${NAMESPACE:-default}"
DUMP_DIR="/tmp/mongodb.dump"
kubectl exec -n ${NAMESPACE} data-db-mongodb-sharded-mongos-0 -- mongodump --db ${ZENKO_MONGODB_DATABASE} -u ${MONGODB_ROOT_USERNAME} -p ${MONGODB_ROOT_PASSWORD} --authenticationDatabase admin --out ${DUMP_DIR}
kubectl exec -n ${NAMESPACE} data-db-mongodb-sharded-mongos-0 -- bash -c "for bson_file in ${DUMP_DIR}/${ZENKO_MONGODB_DATABASE}/*.bson; do json_file=\"${DUMP_DIR}/\$(basename \${bson_file} .bson).json\"; bsondump --outFile \${json_file} \${bson_file}; done"
mkdir -p /tmp/artifacts/data/${STAGE}/mongodb-dump
kubectl cp ${NAMESPACE}/data-db-mongodb-sharded-mongos-0:${DUMP_DIR} /tmp/artifacts/data/${STAGE}/mongodb-dump
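
For reference, the dump collected by this step can be inspected offline once the job artifacts are downloaded. A minimal sketch, assuming a local artifact path, an illustrative collection name, and a throwaway local mongod (none of these are part of the workflow itself):

    # Pretty-print one collection straight from the downloaded artifact
    # (bsondump writes extended JSON to stdout by default).
    DUMP_DIR="./artifacts/data/end2end/mongodb-dump"        # hypothetical download location
    bsondump "${DUMP_DIR}/zenko-database/some-collection.bson" | head

    # Or load the whole database into a local mongod for ad-hoc querying.
    mongorestore --host localhost:27017 --db zenko-database "${DUMP_DIR}/zenko-database"
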
2 changes: 1 addition & 1 deletion .github/scripts/end2end/configs/zenko_dr_sink.yaml
@@ -11,7 +11,7 @@ spec:
userSecretName: mongodb-db-creds-pra
usernameKey: mongodb-username
passwordKey: mongodb-password
databaseName: "pradb"
databaseName: pradb
writeConcern: "majority"
kafka:
managed:
2 changes: 1 addition & 1 deletion .github/scripts/end2end/deploy-zenko.sh
@@ -42,7 +42,7 @@ export ZENKO_ANNOTATIONS="annotations:"
export ZENKO_MONGODB_ENDPOINT="data-db-mongodb-sharded.default.svc.cluster.local:27017"
export ZENKO_MONGODB_CONFIG="writeConcern: 'majority'
enableSharding: true"
export ZENKO_MONGODB_DATABASE="${ZENKO_MONGODB_DATABASE:-'datadb'}"
export ZENKO_MONGODB_DATABASE="${ZENKO_MONGODB_DATABASE:-datadb}"

if [ "${TIME_PROGRESSION_FACTOR}" -gt 1 ]; then
export ZENKO_ANNOTATIONS="$ZENKO_ANNOTATIONS
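
The default-value change above (and the matching one in install-kind-dependencies.sh below) fixes a bash quoting pitfall: inside double quotes, the single quotes in ${ZENKO_MONGODB_DATABASE:-'datadb'} are kept literally, so the fallback database name ends up containing quote characters. A quick illustration, assuming bash:

    unset ZENKO_MONGODB_DATABASE
    echo "${ZENKO_MONGODB_DATABASE:-'datadb'}"   # prints 'datadb' -- the quotes are part of the value
    echo "${ZENKO_MONGODB_DATABASE:-datadb}"     # prints datadb
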
2 changes: 1 addition & 1 deletion .github/scripts/end2end/install-kind-dependencies.sh
@@ -21,7 +21,7 @@ MONGODB_ROOT_USERNAME=root
MONGODB_ROOT_PASSWORD=rootpass
MONGODB_APP_USERNAME=data
MONGODB_APP_PASSWORD=datapass
MONGODB_APP_DATABASE="${ZENKO_MONGODB_DATABASE:-'datadb'}"
MONGODB_APP_DATABASE=${ZENKO_MONGODB_DATABASE:-datadb}
MONGODB_RS_KEY=0123456789abcdef

ENABLE_KEYCLOAK_HTTPS=${ENABLE_KEYCLOAK_HTTPS:-'false'}
2 changes: 1 addition & 1 deletion .github/scripts/end2end/prepare-pra.sh
@@ -6,7 +6,7 @@ export MONGODB_PRA_DATABASE="${MONGODB_PRA_DATABASE:-'pradb'}"
export ZENKO_MONGODB_DATABASE="${MONGODB_PRA_DATABASE}"
export ZENKO_MONGODB_SECRET_NAME="mongodb-db-creds-pra"

echo 'ZENKO_MONGODB_DATABASE="pradb"' >> "$GITHUB_ENV"
echo 'ZENKO_MONGODB_DATABASE=pradb' >> "$GITHUB_ENV"
echo 'ZENKO_MONGODB_SECRET_NAME="mongodb-db-creds-pra"' >> "$GITHUB_ENV"

echo 'ZENKO_IAM_INGRESS="iam.dr.zenko.local"' >> "$GITHUB_ENV"
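
The prepare-pra.sh change above addresses a related issue on the GitHub Actions side: lines appended to $GITHUB_ENV are parsed as literal name=value pairs, with no shell quote removal, so quoting the value would leak the quotes into the environment of later steps. A sketch of the difference, assuming a step running under bash:

    echo 'ZENKO_MONGODB_DATABASE="pradb"' >> "$GITHUB_ENV"   # later steps see "pradb", quotes included
    echo 'ZENKO_MONGODB_DATABASE=pradb'   >> "$GITHUB_ENV"   # later steps see pradb
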
2 changes: 1 addition & 1 deletion .github/workflows/end2end.yaml
@@ -478,7 +478,7 @@ jobs:
- name: Deploy second Zenko for PRA
run: bash deploy-zenko.sh end2end-pra default './configs/zenko.yaml'
env:
ZENKO_MONGODB_DATABASE: "pradb"
ZENKO_MONGODB_DATABASE: pradb
working-directory: ./.github/scripts/end2end
- name: Add Keycloak pra user and assign StorageManager role
shell: bash
77 changes: 37 additions & 40 deletions tests/ctst/common/common.ts
@@ -5,7 +5,7 @@ import Zenko from 'world/Zenko';
import { safeJsonParse } from './utils';
import assert from 'assert';
import { Admin, Kafka } from 'kafkajs';
import {
import {
createBucketWithConfiguration,
putObject,
runActionAgainstBucket,
@@ -31,25 +31,33 @@ export async function cleanS3Bucket(
if (!bucketName) {
return;
}
world.resetCommand();
world.addCommandParameter({ bucket: bucketName });
const createdObjects = world.getSaved<Map<string, string>>('createdObjects');
if (createdObjects !== undefined) {
const results = await S3.listObjectVersions(world.getCommandParameters());
const res = safeJsonParse<ListObjectVersionsOutput>(results.stdout);
assert(res.ok);
const versions = res.result!.Versions || [];
const deleteMarkers = res.result!.DeleteMarkers || [];
await Promise.all(versions.concat(deleteMarkers).map(obj => {
world.addCommandParameter({ key: obj.Key });
world.addCommandParameter({ versionId: obj.VersionId });
return S3.deleteObject(world.getCommandParameters());
}));
world.deleteKeyFromCommand('key');
world.deleteKeyFromCommand('versionId');
try {
Identity.useIdentity(IdentityEnum.ACCOUNT, world.getSaved<string>('accountName') ||
world.parameters.AccountName);
world.resetCommand();
world.addCommandParameter({ bucket: bucketName });
const createdObjects = world.getCreatedObjects();
if (createdObjects !== undefined) {
const results = await S3.listObjectVersions(world.getCommandParameters());
const res = safeJsonParse<ListObjectVersionsOutput>(results.stdout);
if (!res.ok) {
throw results;
}
const versions = res.result!.Versions || [];
const deleteMarkers = res.result!.DeleteMarkers || [];
await Promise.all(versions.concat(deleteMarkers).map(obj => {
world.addCommandParameter({ key: obj.Key });
world.addCommandParameter({ versionId: obj.VersionId });
return S3.deleteObject(world.getCommandParameters());
}));
world.deleteKeyFromCommand('key');
world.deleteKeyFromCommand('versionId');
}
await S3.deleteBucketLifecycle(world.getCommandParameters());
await S3.deleteBucket(world.getCommandParameters());
} catch (err) {
world.logger.warn('Error cleaning bucket', { bucketName, err });
}
await S3.deleteBucketLifecycle(world.getCommandParameters());
await S3.deleteBucket(world.getCommandParameters());
}

async function addMultipleObjects(this: Zenko, numberObjects: number,
@@ -65,17 +73,12 @@ async function addMultipleObjects(this: Zenko, numberObjects: number,
if (userMD) {
this.addCommandParameter({ metadata: JSON.stringify(userMD) });
}
this.addToSaved('objectName', objectNameFinal);
this.logger.debug('Adding object', { objectName: objectNameFinal });
lastResult = await putObject(this, objectNameFinal);
const createdObjects = this.getSaved<Map<string, string>>('createdObjects') || new Map<string, string>();
createdObjects.set(this.getSaved<string>('objectName'), this.getSaved<string>('versionId'));
this.addToSaved('createdObjects', createdObjects);
}
return lastResult;
}

async function addUserMetadataToObject(this: Zenko, objectName: string|undefined, userMD: string) {
async function addUserMetadataToObject(this: Zenko, objectName: string | undefined, userMD: string) {
const objName = objectName || this.getSaved<string>('objectName');
const bucketName = this.getSaved<string>('bucketName');
this.resetCommand();
Expand Down Expand Up @@ -154,7 +157,7 @@ Given('a tag on object {string} with key {string} and value {string}',
this.resetCommand();
this.addCommandParameter({ bucket: this.getSaved<string>('bucketName') });
this.addCommandParameter({ key: objectName });
const versionId = this.getSaved<Map<string, string>>('createdObjects')?.get(objectName);
const versionId = this.getLatestObjectVersion(objectName);
if (versionId) {
this.addCommandParameter({ versionId });
}
@@ -173,12 +176,12 @@ Then('object {string} should have the tag {string} with value {string}',
this.resetCommand();
this.addCommandParameter({ bucket: this.getSaved<string>('bucketName') });
this.addCommandParameter({ key: objectName });
const versionId = this.getSaved<Map<string, string>>('createdObjects')?.get(objectName);
const versionId = this.getLatestObjectVersion(objectName);
if (versionId) {
this.addCommandParameter({ versionId });
}
await S3.getObjectTagging(this.getCommandParameters()).then(res => {
const parsed = safeJsonParse<{ TagSet: [{Key: string, Value: string}] | undefined }>(res.stdout);
const parsed = safeJsonParse<{ TagSet: [{ Key: string, Value: string }] | undefined }>(res.stdout);
assert(parsed.result!.TagSet?.some(tag => tag.Key === tagKey && tag.Value === tagValue));
});
});
@@ -188,14 +191,14 @@ Then('object {string} should have the user metadata with key {string} and value
this.resetCommand();
this.addCommandParameter({ bucket: this.getSaved<string>('bucketName') });
this.addCommandParameter({ key: objectName });
const versionId = this.getSaved<Map<string, string>>('createdObjects')?.get(objectName);
const versionId = this.getLatestObjectVersion(objectName);
if (versionId) {
this.addCommandParameter({ versionId });
}
const res = await S3.headObject(this.getCommandParameters());
assert.ifError(res.stderr);
assert(res.stdout);
const parsed = safeJsonParse<{ Metadata: {[key: string]: string} | undefined }>(res.stdout);
const parsed = safeJsonParse<{ Metadata: { [key: string]: string } | undefined }>(res.stdout);
assert(parsed.ok);
assert(parsed.result!.Metadata);
assert(parsed.result!.Metadata[userMDKey]);
@@ -220,7 +223,7 @@ When('i delete object {string}', async function (this: Zenko, objectName: string
this.resetCommand();
this.addCommandParameter({ bucket: this.getSaved<string>('bucketName') });
this.addCommandParameter({ key: objName });
const versionId = this.getSaved<Map<string, string>>('createdObjects')?.get(objName);
const versionId = this.getLatestObjectVersion(objName);
if (versionId) {
this.addCommandParameter({ versionId });
}
@@ -251,7 +254,7 @@ Then('kafka consumed messages should not take too much place on disk', { timeout
const kafkaAdmin = new Kafka({ brokers: [this.parameters.KafkaHosts] }).admin();
const topics: string[] = (await kafkaAdmin.listTopics())
.filter(t => (t.includes(this.parameters.InstanceID) &&
!ignoredTopics.some(e => t.includes(e))));
!ignoredTopics.some(e => t.includes(e))));

const previousOffsets = await getTopicsOffsets(topics, kafkaAdmin);

@@ -378,21 +381,15 @@ Given('an upload size of {int} B for the object {string}', async function (
) {
this.addToSaved('objectSize', size);
if (this.getSaved<boolean>('preExistingObject')) {
if (objectName) {
this.addToSaved('objectName', objectName);
} else {
this.addToSaved('objectName', `object-${Utils.randomString()}`);
}
await putObject(this, this.getSaved<string>('objectName'));
await putObject(this, objectName);
}
});

When('I PUT an object with size {int}', async function (this: Zenko, size: number) {
if (size > 0) {
this.addToSaved('objectSize', size);
}
this.addToSaved('objectName', `object-${Utils.randomString()}`);
const result = await addMultipleObjects.call(
this, 1, this.getSaved<string>('objectName'), size);
this, 1, `object-${Utils.randomString()}`, size);
this.setResult(result!);
});
29 changes: 29 additions & 0 deletions tests/ctst/common/hooks.ts
@@ -10,6 +10,10 @@ import { prepareQuotaScenarios, teardownQuotaScenarios } from 'steps/quotas/quot
import { cleanS3Bucket } from './common';
import { cleanAzureContainer, cleanZenkoLocation } from 'steps/azureArchive';
import { displayDebuggingInformation, preparePRA } from 'steps/pra';
import {
cleanupAccount,
} from './utils';
import { cleanS3Bucket } from './common';

// HTTPS should not cause any error for CTST
process.env.NODE_TLS_REJECT_UNAUTHORIZED = '0';
@@ -39,6 +43,19 @@ Before({ tags: '@Quotas', timeout: 1200000 }, async function (scenarioOptions) {
await prepareQuotaScenarios(this as Zenko, scenarioOptions);
});

After(async function (this: Zenko, results) {
if (results.result?.status === 'FAILED') {
this.logger.warn('bucket was not cleaned for test', {
bucket: this.getSaved<string>('bucketName'),
});
return;
}
await cleanS3Bucket(
this,
this.getSaved<string>('bucketName'),
);
});

After({ tags: '@Quotas' }, async function () {
await teardownQuotaScenarios(this as Zenko);
});
Expand All @@ -58,4 +75,16 @@ After({ tags: '@AzureArchive' }, async function (this: Zenko) {
);
});

After({ tags: '@BP-ASSUME_ROLE_USER_CROSS_ACCOUNT'}, async function (this: Zenko, results) {
const crossAccountName = this.getSaved<string>('crossAccountName');

if (results.result?.status === 'FAILED' || !crossAccountName) {
this.logger.warn('cross account was not cleaned for test', {
crossAccountName,
});
return;
}
await cleanupAccount(this, crossAccountName);
});

export default Zenko;