diff --git a/tests/.env b/tests/.env index 60cff11b71..f11c10658c 100644 --- a/tests/.env +++ b/tests/.env @@ -19,10 +19,13 @@ export AWS_S3_BACKBEAT_BUCKET_NAME=ci-zenko-aws-crr-target-bucket export AWS_S3_BACKBEAT_SRC_BUCKET_NAME=ci-zenko-aws-crr-src-bucket export AWS_S3_BUCKET_NAME=ci-zenko-aws-target-bucket export AWS_S3_BUCKET_NAME_2=ci-zenko-aws-target-bucket-2 +export AWS_S3_BACKEND_SOURCE_LOCATION=ci-zenko-aws-source-location +export AWS_S3_BACKEND_DESTINATION_LOCATION=ci-zenko-aws-destination-location export AZURE_BACKBEAT_CONTAINER_NAME=ci-zenko-azure-crr-target-bucket export AZURE_BACKBEAT_SRC_CONTAINER_NAME=ci-zenko-azure-crr-src-bucket export AZURE_BACKEND_CONTAINER_NAME=ci-zenko-azure-target-bucket export AZURE_BACKEND_CONTAINER_NAME_2=ci-zenko-azure-target-bucket-2 +export AZURE_BACKEND_DESTINATION_LOCATION=ci-zenko-azure-destination-location export GCP_BUCKET_NAME=ci-zenko-gcp-target-bucket export GCP_BUCKET_NAME_2=ci-zenko-gcp-target-bucket-2 export GCP_CRR_BUCKET_NAME=ci-zenko-gcp-crr-target-bucket diff --git a/tests/.secrets.env.example b/tests/.secrets.env.example index a4acca6b60..0d4a2061f8 100644 --- a/tests/.secrets.env.example +++ b/tests/.secrets.env.example @@ -1,3 +1,5 @@ +export ZENKO_STORAGE_ACCOUNT_ACCESS_KEY= +export ZENKO_STORAGE_ACCOUNT_SECRET_KEY= export AWS_ACCESS_KEY_ID= export AWS_GCP_BACKEND_ACCESS_KEY= export AWS_GCP_BACKEND_ACCESS_KEY_2= diff --git a/tests/zenko_e2e/backbeat/ReplicationUtility.js b/tests/zenko_e2e/backbeat/ReplicationUtility.js index 8639c190ac..d362c6c05f 100644 --- a/tests/zenko_e2e/backbeat/ReplicationUtility.js +++ b/tests/zenko_e2e/backbeat/ReplicationUtility.js @@ -5,8 +5,9 @@ const fs = require('fs'); const { scalityS3Client, awsS3Client } = require('./s3SDK'); -const srcLocation = process.env.AWS_SOURCE_LOCATION; -const destLocation = process.env.AWS_DESTINATION_LOCATION; +const srcLocation = process.env.AWS_S3_BACKEND_SOURCE_LOCATION; +const destAWSLocation = 
process.env.AWS_S3_BACKEND_DESTINATION_LOCATION; +const destAzureLocation = process.env.AZURE_BACKEND_DESTINATION_LOCATION; const REPLICATION_TIMEOUT = 10000; class ReplicationUtility { @@ -37,6 +38,11 @@ class ReplicationUtility { versionInfo.VersionId, next), cb); } + _deleteBlobList(blobList, containerName, cb) { + async.each(blobList, (blob, next) => + this.deleteBlob(containerName, blob.name, undefined, next), cb); + } + _setS3Client(s3Client) { this.s3 = s3Client; return this; @@ -65,6 +71,20 @@ class ReplicationUtility { }); } + deleteAllBlobs(containerName, keyPrefix, cb) { + const options = { include: 'metadata' }; + this.azure.listBlobsSegmented(containerName, null, options, + (err, result, response) => { + if (err) { + return cb(err); + } + // Only delete the blobs put by the current test. + const filteredEntries = result.entries.filter(entry => + entry.name.startsWith(keyPrefix)); + return this._deleteBlobList(filteredEntries, containerName, cb); + }); + } + putObject(bucketName, objectName, content, cb) { this.s3.putObject({ Bucket: bucketName, @@ -315,6 +335,24 @@ class ReplicationUtility { }, cb); } + getBlobToText(containerName, blob, cb) { + this.azure.getBlobToText(containerName, blob, cb); + } + + getBlob(containerName, blob, cb) { + const request = this.azure.createReadStream(containerName, blob); + const data = []; + let totalLength = 0; + request.on('data', chunk => { + totalLength += chunk.length; + data.push(chunk); + }); + request.on('end', () => { + cb(null, Buffer.concat(data, totalLength)); + }); + request.on('error', err => cb(err)); + } + createBucket(bucketName, cb) { this.s3.createBucket({ Bucket: bucketName }, cb); } @@ -437,6 +475,10 @@ class ReplicationUtility { }, cb); } + deleteBlob(containerName, blob, options, cb) { + this.azure.deleteBlob(containerName, blob, options, cb); + } + // Continue getting head object while the status is PENDING or PROCESSING. 
waitUntilReplicated(bucketName, key, versionId, cb) { let status; @@ -461,8 +503,8 @@ class ReplicationUtility { // Continue getting object while the object exists. waitUntilDeleted(bucketName, key, client, cb) { let objectExists; - const method = 'getObject'; - const expectedCode = 'NoSuchKey'; + const method = client === 'azure' ? 'getBlobToText' : 'getObject'; + const expectedCode = client === 'azure' ? 'BlobNotFound' : 'NoSuchKey'; return async.doWhilst(callback => this[method](bucketName, key, err => { if (err && err.code !== expectedCode) { @@ -495,9 +537,9 @@ class ReplicationUtility { destData.ContentLength); this._compareObjectBody(srcData.Body, destData.Body); const srcUserMD = srcData.Metadata; - assert.strictEqual(srcUserMD[`${destLocation}-version-id`], + assert.strictEqual(srcUserMD[`${destAWSLocation}-version-id`], destData.VersionId); - assert.strictEqual(srcUserMD[`${destLocation}-replication-status`], + assert.strictEqual(srcUserMD[`${destAWSLocation}-replication-status`], 'COMPLETED'); const destUserMD = destData.Metadata; assert.strictEqual(destUserMD['scal-version-id'], @@ -516,6 +558,81 @@ class ReplicationUtility { }); } + compareObjectsAzure(srcBucket, containerName, key, cb) { + return async.series([ + next => this.waitUntilReplicated(srcBucket, key, undefined, next), + next => this.getObject(srcBucket, key, next), + next => this.azure.getBlobProperties(containerName, + `${srcBucket}/${key}`, next), + next => this.getBlob(containerName, + `${srcBucket}/${key}`, next), + ], (err, data) => { + if (err) { + return cb(err); + } + const srcData = data[1]; + const destProperties = data[2]; + const destPropResult = destProperties[0]; + const destPropResponse = destProperties[1]; + const destDataBuf = data[3]; + assert.strictEqual(srcData.ReplicationStatus, 'COMPLETED'); + // Azure does not have versioning so there is no version metadata + // from Azure to set on the source. 
+ assert.strictEqual( + srcData.Metadata[`${destAzureLocation}-replication-status`], + 'COMPLETED'); + assert.strictEqual( + destPropResult.metadata['scal_replication_status'], 'REPLICA'); + assert.strictEqual( + destPropResult.metadata['scal_version_id'], srcData.VersionId); + assert.strictEqual( + destPropResponse.headers['x-ms-meta-scal_replication_status'], + 'REPLICA'); + assert.strictEqual( + destPropResponse.headers['x-ms-meta-scal_version_id'], + srcData.VersionId); + this._compareObjectBody(srcData.Body, destDataBuf); + return cb(); + }); + } + + compareAzureObjectProperties(srcBucket, containerName, key, cb) { + return async.series([ + next => this.waitUntilReplicated(srcBucket, key, undefined, next), + next => this.getHeadObject(srcBucket, key, next), + next => this.azure.getBlobProperties(containerName, + `${srcBucket}/${key}`, next), + ], (err, data) => { + if (err) { + return cb(err); + } + const srcData = data[1]; + const destData = data[2]; + const destResult = destData[0]; + const destResponse = destData[1]; + const { contentSettings } = destResult; + const { headers } = destResponse; + let expectedVal = srcData.Metadata.customkey; + assert.strictEqual(expectedVal, + destResult.metadata['customkey']); + assert.strictEqual(expectedVal, + headers['x-ms-meta-customkey']); + expectedVal = srcData.ContentType; + assert.strictEqual(expectedVal, contentSettings.contentType); + assert.strictEqual(expectedVal, headers['content-type']); + expectedVal = srcData.CacheControl; + assert.strictEqual(expectedVal, contentSettings.cacheControl); + assert.strictEqual(expectedVal, headers['cache-control']); + expectedVal = srcData.ContentEncoding; + assert.strictEqual(expectedVal, contentSettings.contentEncoding); + assert.strictEqual(expectedVal, headers['content-encoding']); + expectedVal = srcData.ContentLanguage; + assert.strictEqual(expectedVal, contentSettings.contentLanguage); + assert.strictEqual(expectedVal, headers['content-language']); + return cb(); + }); 
+ }; + compareACLsAWS(srcBucket, destBucket, key, cb) { return async.series([ next => this.waitUntilReplicated(srcBucket, key, undefined, next), @@ -555,6 +672,35 @@ class ReplicationUtility { }); } + compareObjectTagsAzure(srcBucket, destContainer, key, scalityVersionId, + cb) { + return async.series([ + next => this.waitUntilReplicated(srcBucket, key, scalityVersionId, + next), + next => this.getObjectTagging(srcBucket, key, scalityVersionId, + next), + next => this.azure.getBlobMetadata(destContainer, + `${srcBucket}/${key}`, next), + ], (err, data) => { + if (err) { + return cb(err); + } + const srcData = data[1]; + const destData = data[2]; + const destTagSet = []; + const destTags = destData[0].metadata.tags; + if (destTags) { + const parsedTags = JSON.parse(destTags); + Object.keys(parsedTags).forEach(key => destTagSet.push({ + Key: key, + Value: parsedTags[key], + })); + } + assert.deepStrictEqual(srcData.TagSet, destTagSet); + return cb(); + }); + } + assertNoObject(bucketName, key, cb) { this.getObject(bucketName, key, err => { assert.strictEqual(err.code, 'NoSuchKey'); diff --git a/tests/zenko_e2e/backbeat/Using.md b/tests/zenko_e2e/backbeat/Using.md index 37117579aa..d22e3be206 100644 --- a/tests/zenko_e2e/backbeat/Using.md +++ b/tests/zenko_e2e/backbeat/Using.md @@ -10,47 +10,72 @@ ``` 2. Create an account using Orbit. -3. Export the access key and secret key of that account: +3. Export the access key and secret key of that account (for example, in + `.secrets.env`): ``` -export ZENKO_BACKBEAT_ACCESS_KEY= -export ZENKO_BACKBEAT_SECRET_KEY= +export ZENKO_STORAGE_ACCOUNT_ACCESS_KEY= +export ZENKO_STORAGE_ACCOUNT_SECRET_KEY= ``` -3. Install node and npm. -4. Navigate to `Zenko/tests/zenko_e2e/backbeat`. -5. Install node modules: `npm i`. +4. Install node and npm. +5. Navigate to `Zenko/tests/zenko_e2e/backbeat`. +6. Install node modules: `npm i`. ### Tests for CRR to AWS: -1. Create a bucket on AWS `` with versioning enabled. -2. 
In Orbit, create an AWS location `` with an AWS - ``. -3. In Orbit, create an AWS location ``. -4. Export the access key, secret key, AWS bucket name, and AWS location: +1. Create a bucket on AWS `` with versioning + enabled. +2. In Orbit, create an AWS storage location `` + with an AWS bucket ``. +3. In Orbit, create an AWS location ``. +4. Create a container on Azure ``. +5. In Orbit, create an Azure storage location + `` with an Azure container ``. +6. Export the keys, bucket name, container name, and storage location names + (for example, in `.env` and `.secrets.env`): ``` -export AWS_S3_BACKBEAT_ACCESS_KEY= -export AWS_S3_BACKBEAT_SECRET_KEY= -export AWS_S3_BACKBEAT_BUCKET_NAME= -export AWS_DESTINATION_LOCATION= -export AWS_SOURCE_LOCATION= +export AWS_S3_BACKEND_ACCESS_KEY= +export AWS_S3_BACKEND_SECRET_KEY= +export AWS_S3_BACKBEAT_BUCKET_NAME= +export AWS_S3_BACKEND_DESTINATION_LOCATION= +export AWS_S3_BACKEND_SOURCE_LOCATION= +export AZURE_BACKEND_ACCOUNT_NAME= +export AZURE_BACKEND_ACCESS_KEY= +export AZURE_BACKEND_ENDPOINT= +export AZURE_BACKBEAT_CONTAINER_NAME= +export AZURE_BACKEND_DESTINATION_LOCATION= ``` -5. Run the test suite: `npm run test_crr`. +7. If using `*.env` files, source the files: + +``` +source .env && source .secrets.env +``` + +8. Run the test suite: `npm run test_crr`. ### Tests for Backbeat API: -1. Create a bucket on AWS `` with versioning enabled. -2. In Orbit, create an AWS location `` with an AWS - ``. -3. Export the access key, secret key, AWS bucket name, and AWS location: +1. Create a bucket on AWS `` with versioning + enabled. +2. In Orbit, create an AWS location `` with an + AWS bucket ``. +3. Export the keys, AWS bucket name, and AWS location (for example, in `.env` + and `.secrets.env`): + +``` +export AWS_S3_BACKEND_ACCESS_KEY= +export AWS_S3_BACKEND_SECRET_KEY= +export AWS_S3_BACKBEAT_BUCKET_NAME= +export AWS_S3_BACKEND_DESTINATION_LOCATION= +``` + +4. 
If using `*.env` files, source the files: ``` -export AWS_S3_BACKBEAT_ACCESS_KEY= -export AWS_S3_BACKBEAT_SECRET_KEY= -export AWS_S3_BACKBEAT_BUCKET_NAME= -export AWS_DESTINATION_LOCATION= +source .env && source .secrets.env ``` -4. Run the test suite: `npm run test_api`. +5. Run the test suite: `npm run test_api`. diff --git a/tests/zenko_e2e/backbeat/azureSDK.js b/tests/zenko_e2e/backbeat/azureSDK.js new file mode 100644 index 0000000000..f84f2e6b71 --- /dev/null +++ b/tests/zenko_e2e/backbeat/azureSDK.js @@ -0,0 +1,10 @@ +const azure = require('azure-storage'); + +const storageAccount = process.env.AZURE_BACKEND_ACCOUNT_NAME; +const storageAccessKey = process.env.AZURE_BACKEND_ACCESS_KEY; +const storageEndpoint = process.env.AZURE_BACKEND_ENDPOINT; + +const sharedBlobSvc = + azure.createBlobService(storageAccount, storageAccessKey, storageEndpoint); + +module.exports = sharedBlobSvc; diff --git a/tests/zenko_e2e/backbeat/package-lock.json b/tests/zenko_e2e/backbeat/package-lock.json index 716d3e3509..47abba6744 100644 --- a/tests/zenko_e2e/backbeat/package-lock.json +++ b/tests/zenko_e2e/backbeat/package-lock.json @@ -64,6 +64,49 @@ "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.7.0.tgz", "integrity": "sha512-32NDda82rhwD9/JBCCkB+MRYDp0oSvlo2IL6rQWA10PQi7tDUM3eqMSltXmY+Oyl/7N3P3qNtAlv7X0d9bI28w==" }, + "azure-storage": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/azure-storage/-/azure-storage-2.10.0.tgz", + "integrity": "sha1-AgrDQyYsVVLvhlFsu3Z5JB6V5N4=", + "requires": { + "browserify-mime": "1.2.9", + "extend": "1.2.1", + "json-edm-parser": "0.1.2", + "md5.js": "1.3.4", + "readable-stream": "2.0.6", + "request": "2.87.0", + "underscore": "1.8.3", + "uuid": "3.0.0", + "validator": "9.4.1", + "xml2js": "0.2.8", + "xmlbuilder": "0.4.3" + }, + "dependencies": { + "extend": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/extend/-/extend-1.2.1.tgz", + "integrity": "sha1-oPX9bPyDpf5J72mNYOyKYk3UV2w=" + }, + "sax": { + 
"version": "0.5.8", + "resolved": "https://registry.npmjs.org/sax/-/sax-0.5.8.tgz", + "integrity": "sha1-1HLbIo6zMcJQaw6MFVJK25OdEsE=" + }, + "xml2js": { + "version": "0.2.8", + "resolved": "https://registry.npmjs.org/xml2js/-/xml2js-0.2.8.tgz", + "integrity": "sha1-m4FpCTFjH/CdGVdUn69U9PmAs8I=", + "requires": { + "sax": "0.5.8" + } + }, + "xmlbuilder": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/xmlbuilder/-/xmlbuilder-0.4.3.tgz", + "integrity": "sha1-xGFLp04K0ZbmCcknLNnh3bKKilg=" + } + } + }, "base64-js": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.3.0.tgz", @@ -78,6 +121,11 @@ "tweetnacl": "^0.14.3" } }, + "browserify-mime": { + "version": "1.2.9", + "resolved": "https://registry.npmjs.org/browserify-mime/-/browserify-mime-1.2.9.tgz", + "integrity": "sha1-rrGvKN5sDXpqLOQK22j/GEIq8x8=" + }, "buffer": { "version": "4.9.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-4.9.1.tgz", @@ -238,6 +286,15 @@ "har-schema": "^2.0.0" } }, + "hash-base": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/hash-base/-/hash-base-3.0.4.tgz", + "integrity": "sha1-X8hoaEfs1zSZQDMZprCj8/auSRg=", + "requires": { + "inherits": "2.0.3", + "safe-buffer": "5.1.2" + } + }, "http-signature": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", @@ -305,6 +362,14 @@ "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=", "optional": true }, + "json-edm-parser": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/json-edm-parser/-/json-edm-parser-0.1.2.tgz", + "integrity": "sha1-HmCw/vG8CvZ7wNFG393lSGzWFbQ=", + "requires": { + "jsonparse": "1.2.0" + } + }, "json-schema": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", @@ -320,6 +385,11 @@ "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" }, + 
"jsonparse": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/jsonparse/-/jsonparse-1.2.0.tgz", + "integrity": "sha1-XAxWhRBxYOcv50ib3eoLRMK8Z70=" + }, "jsprim": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", @@ -341,6 +411,15 @@ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-2.7.3.tgz", "integrity": "sha1-bUUk6LlV+V1PW1iFHOId1y+06VI=" }, + "md5.js": { + "version": "1.3.4", + "resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.4.tgz", + "integrity": "sha1-6b296UogpawYsENA/Fdk1bCdkB0=", + "requires": { + "hash-base": "3.0.4", + "inherits": "2.0.3" + } + }, "mime-db": { "version": "1.35.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.35.0.tgz", @@ -407,6 +486,11 @@ "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" }, + "process-nextick-args": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-1.0.7.tgz", + "integrity": "sha1-FQ4gt1ZZCtP5EJPyWk8q2L/zC6M=" + }, "punycode": { "version": "1.3.2", "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.3.2.tgz", @@ -422,6 +506,19 @@ "resolved": "https://registry.npmjs.org/querystring/-/querystring-0.2.0.tgz", "integrity": "sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA=" }, + "readable-stream": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.0.6.tgz", + "integrity": "sha1-j5A0HmilPMySh4jaz80Rs265t44=", + "requires": { + "core-util-is": "1.0.2", + "inherits": "2.0.3", + "isarray": "1.0.0", + "process-nextick-args": "1.0.7", + "string_decoder": "0.10.31", + "util-deprecate": "1.0.2" + } + }, "request": { "version": "2.87.0", "resolved": "https://registry.npmjs.org/request/-/request-2.87.0.tgz", @@ -492,6 +589,11 @@ "tweetnacl": "~0.14.0" } }, + "string_decoder": { + "version": "0.10.31", + "resolved": 
"https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz", + "integrity": "sha1-YuIDvEF2bGwoyfyEMB2rHFMQ+pQ=" + }, "supports-color": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-1.2.0.tgz", @@ -526,6 +628,11 @@ "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=", "optional": true }, + "underscore": { + "version": "1.8.3", + "resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz", + "integrity": "sha1-Tz+1OxBuYJf8+ctBCfKl6b36UCI=" + }, "url": { "version": "0.10.3", "resolved": "https://registry.npmjs.org/url/-/url-0.10.3.tgz", @@ -535,11 +642,21 @@ "querystring": "0.2.0" } }, + "util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=" + }, "uuid": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.0.0.tgz", "integrity": "sha1-Zyj8BFnEUNeWqZwxg3VpvfZy1yg=" }, + "validator": { + "version": "9.4.1", + "resolved": "https://registry.npmjs.org/validator/-/validator-9.4.1.tgz", + "integrity": "sha512-YV5KjzvRmSyJ1ee/Dm5UED0G+1L4GZnLN3w6/T+zZm8scVua4sOhYKWTUrKa0H/tMiJyO9QLHMPN+9mB/aMunA==" + }, "verror": { "version": "1.10.0", "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", diff --git a/tests/zenko_e2e/backbeat/package.json b/tests/zenko_e2e/backbeat/package.json index 0725eeab8a..7762b3c7c6 100644 --- a/tests/zenko_e2e/backbeat/package.json +++ b/tests/zenko_e2e/backbeat/package.json @@ -13,6 +13,7 @@ "dependencies": { "async": "2.1.2", "aws-sdk": "2.28.0", + "azure-storage": "^2.10.0", "mocha": "2.3.4", "request": "^2.87.0" }, diff --git a/tests/zenko_e2e/backbeat/s3SDK.js b/tests/zenko_e2e/backbeat/s3SDK.js index cc4f883e09..902d80c531 100644 --- a/tests/zenko_e2e/backbeat/s3SDK.js +++ b/tests/zenko_e2e/backbeat/s3SDK.js @@ -1,8 +1,8 @@ const S3 = require('aws-sdk').S3; const scalityS3Client = new S3({ - accessKeyId: 
process.env.ZENKO_BACKBEAT_ACCESS_KEY, - secretAccessKey: process.env.ZENKO_BACKBEAT_SECRET_KEY, + accessKeyId: process.env.ZENKO_STORAGE_ACCOUNT_ACCESS_KEY, + secretAccessKey: process.env.ZENKO_STORAGE_ACCOUNT_SECRET_KEY, sslEnabled: false, endpoint: 'http://zenko.local', apiVersions: { s3: '2006-03-01' }, @@ -20,8 +20,8 @@ const scalityS3Client = new S3({ }); const awsS3Client = new S3({ - accessKeyId: process.env.AWS_S3_BACKBEAT_ACCESS_KEY, - secretAccessKey: process.env.AWS_S3_BACKBEAT_SECRET_KEY, + accessKeyId: process.env.AWS_S3_BACKEND_ACCESS_KEY, + secretAccessKey: process.env.AWS_S3_BACKEND_SECRET_KEY, sslEnabled: true, endpoint: 'https://s3.amazonaws.com', apiVersions: { s3: '2006-03-01' }, diff --git a/tests/zenko_e2e/backbeat/tests/api/objectMonitor.js b/tests/zenko_e2e/backbeat/tests/api/objectMonitor.js index f00b85a01a..71ea7b2f78 100644 --- a/tests/zenko_e2e/backbeat/tests/api/objectMonitor.js +++ b/tests/zenko_e2e/backbeat/tests/api/objectMonitor.js @@ -10,7 +10,7 @@ const scalityUtils = new ReplicationUtility(scalityS3Client); const awsUtils = new ReplicationUtility(awsS3Client); const srcBucket = `source-bucket-${Date.now()}`; const destBucket = process.env.AWS_S3_BACKBEAT_BUCKET_NAME; -const destLocation = process.env.AWS_DESTINATION_LOCATION; +const destLocation = process.env.AWS_S3_BACKEND_DESTINATION_LOCATION; const hex = crypto.createHash('md5') .update(Math.random().toString()) .digest('hex'); diff --git a/tests/zenko_e2e/backbeat/tests/crr/awsBackend.js b/tests/zenko_e2e/backbeat/tests/crr/awsBackend.js index 1843124fe8..db76d4129e 100644 --- a/tests/zenko_e2e/backbeat/tests/crr/awsBackend.js +++ b/tests/zenko_e2e/backbeat/tests/crr/awsBackend.js @@ -9,7 +9,7 @@ const scalityUtils = new ReplicationUtility(scalityS3Client); const awsUtils = new ReplicationUtility(awsS3Client); const srcBucket = `source-bucket-${Date.now()}`; const destBucket = process.env.AWS_S3_BACKBEAT_BUCKET_NAME; -const destLocation = 
process.env.AWS_DESTINATION_LOCATION; +const destLocation = process.env.AWS_S3_BACKEND_DESTINATION_LOCATION; const hex = crypto.createHash('md5') .update(Math.random().toString()) .digest('hex'); diff --git a/tests/zenko_e2e/backbeat/tests/crr/azureBackend.js b/tests/zenko_e2e/backbeat/tests/crr/azureBackend.js new file mode 100644 index 0000000000..778bf1b84b --- /dev/null +++ b/tests/zenko_e2e/backbeat/tests/crr/azureBackend.js @@ -0,0 +1,148 @@ +const assert = require('assert'); +const crypto = require('crypto'); +const { series } = require('async'); + +const { scalityS3Client } = require('../../s3SDK'); +const sharedBlobSvc = require('../../azureSDK'); +const ReplicationUtility = require('../../ReplicationUtility'); + +const utils = new ReplicationUtility(scalityS3Client, sharedBlobSvc); +const destContainer = process.env.AZURE_BACKBEAT_CONTAINER_NAME; +const destLocation = process.env.AZURE_BACKEND_DESTINATION_LOCATION; +const srcBucket = `source-bucket-${Date.now()}`; +const hex = crypto.createHash('md5') + .update(Math.random().toString()) + .digest('hex'); +const keyPrefix = `${srcBucket}/${hex}`; +const key = `${keyPrefix}/object-to-replicate-${Date.now()}`; +const copyKey = `${key}-copy`; +const copySource = `/${srcBucket}/${key}`; +// Blob name with 1024 character Azure blob name limit. 
+const keyutf8 = `${keyPrefix}/%EA%9D%8B崰㈌㒈保轖䳷䀰⺩ቆ楪秲ⴝ㿅鼎ꓜ퇬枅࿷염곞召㸾⌙ꪊᆐ庍뉆䌗幐鸆䛃➟녩ˍ뙪臅⠙≼绒벊냂詴 끴鹲萯⇂㭢䈊퉉楝舳㷖족痴䧫㾵᏷ำꎆ꼵껪멷㄀誕㳓腜쒃컹㑻鳃삚舿췈孨੦⮀NJ곓⵪꺼꜈嗼뫘悕錸瑺⁤⑬১㵀⡸Ҏ礄䧛졼⮦ٞ쫁퓡厈譤擵泶鵇俻縫륒㜓垻ㆩꝿ詀펉ᆙ舑䜾힑藪碙ꀎꂰ췊Ᏻ 㘺幽醛잯ද汧Ꟑꛒⶨ쪸숞헹㭔ꡔᘼ뺓ᡆ᡾ᑟ䅅퀭耓弧⢠⇙폪ް蛧⃪Ἔ돫ꕢ븥ヲ캂䝄쟐颺ᓾ둾Ұ껗礞ᾰ瘹蒯硳풛瞋襎奺熝妒컚쉴⿂㽝㝳駵鈚䄖戭䌸᫲ᇁ䙪鸮ᐴ稫ⶭ뀟ھ⦿䴳稉ꉕ捈袿놾띐✯伤䃫⸧ꠏ瘌틳藔ˋ㫣敀䔩㭘식↴⧵佶痊牌ꪌ搒꾛æᤈべ쉴挜炩⽍舘ꆗ줣겤뒑徭Z䐨 敗羥誜嘳ֶꫜ걵ࣀ묟ኋ拃秷膤䨸菥䟆곘縧멀煣卲챸⧃⏶혣뎧邕⢄⭖陙䣎灏ꗛ僚䌁䠒䲎둘ꪎ傩쿌ᨌ뀻阥눉넠猌ㆯ㰢船戦跏灳蝒礯鞰諾벥煸珬㟑孫鞹Ƭꄹ孙ꢱ钐삺韦帇곎矇૧ਔ뙞밺㊑ک씌촃Ȅ頰ᖅ懚ホῐ꠷㯢먈㝹୥밷㮇䘖桲阥黾噘흳뵿澚㷞꫽鲂♤蔏앜嶃쎘嵥撞㒲 댦坪繤삮憫푇噻琕䖰⒣鯤蕆힀혙狶噕皩溊烻ᓧ鈠ᴥ徰穆ꘛ蹕綻表虣誗릊翿뱩䁞ሑ唫ꇘ苉钽뗑☧≳䟟踬ᶄꎶ愚쒄ꣷ鯍裊鮕漨踒ꠍ픸Ä☶莒浏钸목탬툖氭锰ꌒ⬧䨑렌肣꾯༭炢뤂㉥ˠٸ൪㤌ᶟ訧ᜒೳ揪Ⴛ摖㸣᳑⹞걀ꢢ䏹" 똣왷䉑摴둜辍㫣ზ㥌甦鵗⾃ꗹ빖ꓡ㲑㩝〯蘼᫩헸ῖ"` + // eslint-disable-line +'%EA%9D%8B崰㈌㒈保轖䳷䀰⺩ቆ楪秲ⴝ㿅鼎僷ꈅꓜ퇬枅࿷염곞召㸾⌙ꪊᆐ庍뉆䌗↎舳㷖족幐鸆蹪幐䎺誧洗靁麀厷ℷ쫤ᛩ꺶㖭簹릍铰᫫眘쁽暨鿐魪셑蛃춧㡡竺뫁噛̷ᗰⷑ錜⑔痴䧫㾵᏷ำꎆ꼵껪멷㄀誕㳓腜쒃컹㑻鳃삚舿췈孨੦⮀NJ곓⵪꺼꜈嗼뫘悕錸瑺⁤⑬১㵀⡸Ҏ礄䧛졼⮦ٞ쫁퓡厈譤擵泶鵇俻縫륒㜓垻ㆩꝿ詀펉ᆙ舑䜾힑藪碙ꀎꂰ췊Ᏻ 㘺幽醛잯ද汧Ꟑꛒⶨ쪸숞헹㭔ꡔᘼ뺓ᡆ᡾ᑟ䅅퀭耓弧⢠⇙폪ް蛧⃪Ἔ돫ꕢ븥ヲ캂䝄쟐颺ᓾ둾Ұ껗礞ᾰ瘹蒯硳풛瞋襎奺熝妒컚쉴⿂㽝㝳駵鈚䄖戭䌸᫲ᇁ䙪鸮ᐴ稫ⶭ뀟ھ⦿䴳稉ꉕ捈袿놾띐✯伤䃫⸧ꠏ瘌틳藔ˋ㫣敀䔩㭘식↴⧵佶痊牌ꪌ搒꾛æᤈべ쉴挜炩⽍舘ꆗ줣겤뒑徭Z䐨 敗羥誜嘳ֶꫜ걵ࣀ묟ኋ拃秷膤䨸菥䟆곘縧멀煣卲챸⧃⏶혣뎧邕⢄⭖陙䣎灏ꗛ僚䌁䠒䲎둘ꪎ傩쿌ᨌ뀻阥눉넠猌ㆯ㰢船戦跏灳蝒礯鞰諾벥煸珬㟑孫鞹Ƭꄹ孙ꢱ钐삺韦帇곎矇૧ਔ뙞밺㊑ک씌촃Ȅ頰ᖅ懚ホῐ꠷㯢먈㝹୥밷㮇䘖桲阥黾噘흳뵿澚㷞꫽鲂♤蔏앜嶃쎘嵥撞㒲 댦坪繤삮憫푇噻琕䖰虣誗릊翿뱩䁞ሑ唫ꇘ苉钽뗑☧≳䟟踬ᶄꎶ愚쒄ꣷ鯍裊鮕漨踒ꠍ목탬툖氭锰ꌒ⬧䨑렌肣꾯༭炢뤂㉥ˠٸ൪㤌ᶟ訧ᜒೳ揪Ⴛ摖㸣᳑⹞걀ꢢ䏹" 똣왷䉑摴둜辍㫣ზ㥌甦鵗⾃ꗹ빖ꓡ㲑㩝〯蘼᫩헸ῖ"'; // eslint-disable-line +const REPLICATION_TIMEOUT = 300000; + +describe('Replication with Azure backend', function() { + this.timeout(REPLICATION_TIMEOUT); + let roleArn = 'arn:aws:iam::root:role/s3-replication-role'; + + beforeEach(done => series([ + next => utils.createVersionedBucket(srcBucket, next), + next => utils.putBucketReplicationMultipleBackend(srcBucket, + destContainer, roleArn, destLocation, next), + ], done)); + + afterEach(done => series([ + next => utils.deleteVersionedBucket(srcBucket, next), + next => utils.deleteAllBlobs(destContainer, `${srcBucket}/${keyPrefix}`, + next), + ], done)); + + it('should replicate an object', done => series([ + next => utils.putObject(srcBucket, key, Buffer.alloc(1), next), + next => utils.compareObjectsAzure(srcBucket, destContainer, key, next), + ], done)); + + it('should replicate a zero byte object', done => series([ + next => utils.putObject(srcBucket, key, undefined, next), + next => 
utils.compareObjectsAzure(srcBucket, destContainer, key, next), + ], done)); + + it.skip('should replicate an object with UTF-8 encoding', done => series([ + next => utils.putObject(srcBucket, keyutf8, Buffer.alloc(1), next), + next => utils.compareObjectsAzure(srcBucket, destContainer, keyutf8, + next), + ], done)); + + it('should replicate a copied object', done => series([ + next => utils.putObject(srcBucket, key, Buffer.alloc(1), next), + next => utils.copyObject(srcBucket, copySource, copyKey, next), + next => utils.compareObjectsAzure(srcBucket, destContainer, copyKey, + next), + // avoid a race with cleanup by ensuring everything is replicated + next => utils.waitUntilReplicated(srcBucket, key, undefined, next), + ], done)); + + it('should replicate a MPU object: 2 parts', done => series([ + next => utils.completeMPUAWS(srcBucket, key, 2, next), + next => utils.compareObjectsAzure(srcBucket, destContainer, key, next), + ], done)); + + it('should replicate a MPU object: 10 parts', done => series([ + next => utils.completeMPUAWS(srcBucket, key, 10, next), + next => utils.compareObjectsAzure(srcBucket, destContainer, key, next), + ], done)); + + [undefined, + `0-${1024 * 1024 * 5}`, + `${1024 * 1024 * 2}-${1024 * 1024 * 7}`].forEach(range => + it('should replicate a MPU with parts copied from another MPU with ' + + `byte range '${range}' for each part`, done => series([ + next => utils.completeMPUAWS(srcBucket, key, 2, next), + next => utils.completeMPUWithPartCopy(srcBucket, copyKey, + copySource, range, 2, next), + next => utils.compareObjectsAzure(srcBucket, destContainer, copyKey, + next), + // avoid a race with cleanup by ensuring everything is replicated + next => utils.waitUntilReplicated(srcBucket, key, undefined, next), + ], done))); + + it('should delete the destination object when putting a delete marker on ' + + 'the source object', done => series([ + next => utils.putObject(srcBucket, key, Buffer.alloc(1), next), + next => 
utils.compareObjectsAzure(srcBucket, destContainer, key, next), + next => utils.deleteObject(srcBucket, key, null, next), + next => utils.assertNoObject(srcBucket, key, next), + next => utils.waitUntilDeleted(destContainer, `${srcBucket}/${key}`, + 'azure', next), + next => utils.getBlobToText(destContainer, `${srcBucket}/${key}`, + err => { + assert.strictEqual(err.code, 'BlobNotFound'); + return next(); + }), + ], done)); + + it.skip('should replicate object tags of the latest version', done => + series([ + next => utils.putObject(srcBucket, key, Buffer.alloc(1), next), + next => utils.compareObjectsAzure(srcBucket, destContainer, key, next), + next => utils.putObjectTagging(srcBucket, key, undefined, next), + next => utils.compareObjectTagsAzure(srcBucket, destContainer, key, + undefined, next), + ], done)); + + it.skip('should replicate deleting object tags of the latest version', + done => series([ + next => utils.putObject(srcBucket, key, Buffer.alloc(1), next), + next => utils.compareObjectsAzure(srcBucket, destContainer, key, next), + next => utils.putObjectTagging(srcBucket, key, undefined, next), + next => utils.compareObjectTagsAzure(srcBucket, destContainer, key, + undefined, next), + next => utils.deleteObjectTagging(srcBucket, key, undefined, next), + next => utils.compareObjectTagsAzure(srcBucket, destContainer, key, + undefined, next), + ], done)); + + it('should replicate an object with properties', done => series([ + next => utils.putObjectWithProperties(srcBucket, key, Buffer.alloc(1), + next), + next => utils.compareAzureObjectProperties(srcBucket, destContainer, + key, next), + ], done)); + + it('should replicate a copied object with properties', done => series([ + next => utils.putObjectWithProperties(srcBucket, key, Buffer.alloc(1), + next), + next => utils.copyObject(srcBucket, copySource, copyKey, next), + next => utils.compareAzureObjectProperties(srcBucket, destContainer, + copyKey, next), + // avoid a race with cleanup by ensuring 
everything is replicated + next => utils.waitUntilReplicated(srcBucket, key, undefined, next), + ], done)); + + it('should replicate a MPU object with properties', done => series([ + next => utils.completeMPUAWSWithProperties(srcBucket, key, 2, next), + next => utils.compareAzureObjectProperties(srcBucket, destContainer, + key, next), + ], done)); +});