Merge pull request #69 from delphix-integrations/develop
HUBS-2049 | Release Version 2.1.0 | Develop -> Main
Uddipaan-Hazarika authored Oct 5, 2023
2 parents 01ac55d + 3b26681 commit 2f9a77d
Showing 15 changed files with 1,068 additions and 40 deletions.
2 changes: 1 addition & 1 deletion .goreleaser.yml
@@ -1,7 +1,7 @@
# Visit https://goreleaser.com for documentation on how to customize this
# behavior.
env:
- PROVIDER_VERSION=2.0.0
- PROVIDER_VERSION=2.1.0
before:
hooks:
# this is just an example and not a requirement for provider building/publishing
2 changes: 1 addition & 1 deletion GNUmakefile
@@ -3,7 +3,7 @@ HOSTNAME=delphix.com
NAMESPACE=dct
NAME=delphix
BINARY=terraform-provider-${NAME}
VERSION=2.0.0
VERSION=2.1.0
OS_ARCH=darwin_amd64

default: install
182 changes: 182 additions & 0 deletions docs/resources/appdata_dsource.md
@@ -0,0 +1,182 @@
# Resource: delphix_appdata_dsource

In Delphix terminology, a dSource is a database that the Delphix Continuous Data Engine uses to create and update virtual copies of your database.
A dSource is created and managed by the Delphix Continuous Data Engine.

The dSource resource allows Terraform to create and delete Delphix dSources. This specifically enables the apply and destroy Terraform commands. Modification of existing dSource resources via the apply command is not supported. All supported parameters are listed below.

## System Requirements

* Data Control Tower v10.0.1+ is required for dSource management. Lower versions are not supported.
* The dSource resource does not support Oracle, SQL Server, or SAP ASE. The examples below are shown in the PostgreSQL context. The parameter values can be adapted for other AppData connectors, such as SAP HANA, IBM Db2, etc. A minimal provider configuration is sketched below.
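
For reference, a minimal provider configuration pinned to this release might look like the sketch below; the host and API key values are placeholders mirroring the example template in this repository.

```hcl
terraform {
  required_providers {
    delphix = {
      source  = "delphix-integrations/delphix"
      version = "2.1.0"
    }
  }
}

provider "delphix" {
  host              = "HOSTNAME" # DCT hostname (placeholder)
  key               = "1.XXXX"   # DCT API key (placeholder)
  tls_insecure_skip = true       # skip TLS verification; suitable for test environments only
}
```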

## Example Usage

The linking of a dSource can be configured through various ingestion approaches. Each configuration is customized to the connector and its supported options. The three PostgreSQL parameter sets below show working examples.

```hcl
# Link a dSource using an external backup.
resource "delphix_appdata_dsource" "dsource_name" {
  source_id                   = SOURCE_ID
  group_id                    = GROUP_ID
  log_sync_enabled            = false
  make_current_account_owner  = true
  link_type                   = LINK_TYPE
  name                        = DSOURCE_NAME
  staging_mount_base          = MOUNT_PATH
  environment_user            = ENV_USER
  staging_environment         = STAGING_ENV
  parameters = jsonencode({
    externalBackup : [
      {
        keepStagingInSync : false,
        backupPath : BKP_PATH,
        walLogPath : LOG_PATH
      }
    ],
    postgresPort : PORT,
    mountLocation : MOUNT_PATH
  })
  sync_parameters = jsonencode({
    resync = true
  })
}

# Link a dSource using Delphix Initiated Backup.
resource "delphix_appdata_dsource" "dsource_name" {
  source_id                   = SOURCE_ID
  group_id                    = GROUP_ID
  log_sync_enabled            = false
  make_current_account_owner  = true
  link_type                   = LINK_TYPE
  name                        = DSOURCE_NAME
  staging_mount_base          = MOUNT_PATH
  environment_user            = ENV_USER
  staging_environment         = STAGING_ENV
  parameters = jsonencode({
    delphixInitiatedBackupFlag : true,
    delphixInitiatedBackup : [
      {
        userName : USERNAME,
        postgresSourcePort : SOURCE_PORT,
        userPass : PASSWORD,
        sourceHostAddress : SOURCE_ADDRESS
      }
    ],
    postgresPort : PORT,
    mountLocation : MOUNT_PATH
  })
  sync_parameters = jsonencode({
    resync = true
  })
}

# Link a dSource using Single Database Ingestion.
resource "delphix_appdata_dsource" "dsource_name" {
  source_id                   = SOURCE_ID
  group_id                    = GROUP_ID
  log_sync_enabled            = false
  make_current_account_owner  = true
  link_type                   = LINK_TYPE
  name                        = DSOURCE_NAME
  staging_mount_base          = MOUNT_PATH
  environment_user            = ENV_USER
  staging_environment         = STAGING_ENV
  parameters = jsonencode({
    singleDatabaseIngestionFlag : true,
    singleDatabaseIngestion : [
      {
        databaseUserName : DBUSER_NAME,
        sourcePort : SOURCE_PORT,
        dumpJobs : 2,
        restoreJobs : 2,
        databaseName : DB_NAME,
        databaseUserPassword : DB_PASS,
        dumpDir : DIR,
        sourceHost : SOURCE_HOST,
        postgresqlFile : FILE
      }
    ],
    postgresPort : PORT,
    mountLocation : MOUNT_PATH
  })
  sync_parameters = jsonencode({
    resync = true
  })
}
```
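
Once linked, the dSource's ID can be referenced by other resources in the same configuration. The snippet below is a sketch only: the `delphix_vdb` resource and its `source_data_id` and `auto_select_repository` arguments are assumptions based on the provider's VDB documentation and should be verified before use.

```hcl
# Hypothetical follow-on resource: provision a VDB from the linked dSource.
resource "delphix_vdb" "vdb_from_dsource" {
  source_data_id         = delphix_appdata_dsource.dsource_name.id # standard Terraform resource ID of the dSource above
  auto_select_repository = true
}
```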

## Argument Reference

* `source_id` - (Required) ID of the source to link.

* `group_id` - (Required) ID of the dataset group to which this dSource should belong.

* `log_sync_enabled` - (Required) True if LogSync should run for this database.

* `make_current_account_owner` - (Required) Whether the account creating this dSource must be configured as its owner.

* `description` - (Optional) The notes/description for the dSource.

* `link_type` - (Required) The type of link to create. Default is AppDataDirect.
* `AppDataDirect` - Represents the AppData specific parameters of a link request for a source directly replicated into the Delphix Engine.
* `AppDataStaged` - Represents the AppData specific parameters of a link request for a source with a staging source.

* `name` - (Optional) The unique name of the dSource. If unset, a name is randomly generated.

* `staging_mount_base` - (Optional) The base mount point for the NFS mount on the staging environment [AppDataStaged only].

* `environment_user` - (Required) The OS user to use for linking.

* `staging_environment` - (Required) The environment used as an intermediate stage to pull data into Delphix [AppDataStaged only].

* `staging_environment_user` - (Optional) The environment user used to access the staging environment [AppDataStaged only].

* `tags` - (Optional) The tags to be created for the dSource. Each tag is a block of two parameters:
* `key` - (Required) Key of the tag
* `value` - (Required) Value of the tag

* `ops_pre_sync` - (Optional) Operations to perform before syncing the created dSource. These operations can quiesce any data prior to syncing. An illustrative example is shown after this argument list.
* `name` - Name of the hook
* `command` - Command to be executed
* `shell` - Type of shell. Valid values are `[bash, shell, expect, ps, psd]`
* `credentials_env_vars` - List of environment variables that will contain credentials for this operation
* `base_var_name` - Base name of the environment variables. Variables are named by appending '_USER', '_PASSWORD', '_PUBKEY' and '_PRIVKEY' to this base name, respectively. Variables whose values are not entered or are not present in the type of credential or vault selected will not be set.
* `password` - Password to assign to the environment variables.
* `vault` - The name or reference of the vault to assign to the environment variables.
* `hashicorp_vault_engine` - Vault engine name where the credential is stored.
* `hashicorp_vault_secret_path` - Path in the vault engine where the credential is stored.
* `hashicorp_vault_username_key` - Hashicorp vault key for the username in the key-value store.
* `hashicorp_vault_secret_key` - Hashicorp vault key for the password in the key-value store.
* `azure_vault_name` - Azure key vault name.
* `azure_vault_username_key` - Azure vault key in the key-value store.
* `azure_vault_secret_key` - Azure vault key in the key-value store.
* `cyberark_vault_query_string` - Query to find a credential in the CyberArk vault.

* `ops_post_sync` - (Optional) Operations to perform after syncing a created dSource.
* `name` - Name of the hook
* `command` - Command to be executed
* `shell` - Type of shell. Valid values are `[bash, shell, expect, ps, psd]`
* `credentials_env_vars` - List of environment variables that will contain credentials for this operation
* `base_var_name` - Base name of the environment variables. Variables are named by appending '_USER', '_PASSWORD', '_PUBKEY' and '_PRIVKEY' to this base name, respectively. Variables whose values are not entered or are not present in the type of credential or vault selected will not be set.
* `password` - Password to assign to the environment variables.
* `vault` - The name or reference of the vault to assign to the environment variables.
* `hashicorp_vault_engine` - Vault engine name where the credential is stored.
* `hashicorp_vault_secret_path` - Path in the vault engine where the credential is stored.
* `hashicorp_vault_username_key` - Hashicorp vault key for the username in the key-value store.
* `hashicorp_vault_secret_key` - Hashicorp vault key for the password in the key-value store.
* `azure_vault_name` - Azure key vault name.
* `azure_vault_username_key` - Azure vault key in the key-value store.
* `azure_vault_secret_key` - Azure vault key in the key-value store.
* `cyberark_vault_query_string` - Query to find a credential in the CyberArk vault.

* `excludes` - (Optional) List of subdirectories in the source to exclude when syncing data. These paths are relative to the root of the source directory. [AppDataDirect only]

* `follow_symlinks` - (Optional) List of symlinks in the source to follow when syncing data. These paths are relative to the root of the source directory. All other symlinks are preserved. [AppDataDirect only]

* `parameters` - (Optional) The JSON payload is based on the type of dSource being created. Different data sources require different parameters.

* `sync_parameters` - (Optional) The JSON payload conforming to the snapshot parameters definition in a LUA toolkit or platform plugin.
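
As referenced in the `ops_pre_sync` description above, the sketch below shows how `tags` and a HashiCorp Vault-backed `credentials_env_vars` block might be combined. The vault, engine, and secret path values are illustrative placeholders only, and the required linking arguments use the same placeholder convention as the examples above.

```hcl
resource "delphix_appdata_dsource" "tagged_dsource" {
  source_id                  = SOURCE_ID
  group_id                   = GROUP_ID
  log_sync_enabled           = false
  make_current_account_owner = true
  link_type                  = LINK_TYPE
  environment_user           = ENV_USER
  staging_environment        = STAGING_ENV
  parameters                 = jsonencode({}) # connector-specific payload; see Example Usage

  tags {
    key   = "environment"
    value = "dev"
  }

  ops_pre_sync {
    name    = "quiesce-app"
    command = "echo \"quiesce\""
    shell   = "bash"
    credentials_env_vars {
      base_var_name                = "DSOURCE_CRED"    # exposes DSOURCE_CRED_USER / DSOURCE_CRED_PASSWORD
      vault                        = "my-vault"        # placeholder vault reference
      hashicorp_vault_engine       = "kv"              # placeholder engine name
      hashicorp_vault_secret_path  = "delphix/dsource" # placeholder secret path
      hashicorp_vault_username_key = "username"
      hashicorp_vault_secret_key   = "password"
    }
  }
}
```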
4 changes: 1 addition & 3 deletions docs/resources/vdb.md
@@ -3,9 +3,7 @@
In Delphix terminology, a VDB is a database provisioned from either a dSource or another VDB which is a full read/write copy of the source data.
A VDB is created and managed by the Delphix Continuous Data Engine.


The VDB resource allows terraform to CREATE (also known as Provision), READ, UPDATE and DELETE Delphix Virtual Databases (VDB).
Update operation does not support all VDB parameters. The supported parameters are listed below.
The VDB resource allows Terraform to create, update, and delete Delphix VDBs. This specifically enables the apply and destroy Terraform commands. The update operation does not support all VDB parameters; all supported parameters are listed below.

## Example Usage
Provisioning can be done by two methods: provision by snapshot and provision by timestamp.
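
As a minimal sketch of the snapshot method; the `delphix_vdb` attribute names shown here (`provision_type`, `source_data_id`, `snapshot_id`, `auto_select_repository`) are assumptions to be checked against the VDB argument reference below.

```hcl
resource "delphix_vdb" "vdb_by_snapshot" {
  provision_type         = "snapshot"  # assumed selector between snapshot and timestamp provisioning
  source_data_id         = DSOURCE_ID  # placeholder: dSource or VDB to provision from
  snapshot_id            = SNAPSHOT_ID # placeholder: snapshot to provision from
  auto_select_repository = true
}
```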
131 changes: 131 additions & 0 deletions examples/dsource/main.tf
@@ -0,0 +1,131 @@
/**
* Summary: This template showcases the properties available when creating an AppData dSource.
*/

terraform {
  required_providers {
    delphix = {
      version = "VERSION"
      source  = "delphix-integrations/delphix"
    }
  }
}

provider "delphix" {
  tls_insecure_skip = true
  key               = "1.XXXX"
  host              = "HOSTNAME"
}


# resource "delphix_appdata_dsource" "test_app_data_dsource" {
# source_id = "1-APPDATA_STAGED_SOURCE_CONFIG-6"
# group_id = "1-GROUP-1"
# log_sync_enabled = false
# make_current_account_owner = true
# link_type = "AppDataStaged"
# name = "appdata_dsource"
# staging_mount_base = ""
# environment_user = "HOST_USER-2"
# staging_environment = "1-UNIX_HOST_ENVIRONMENT-2"
# parameters = jsonencode({
# externalBackup : [],
# delphixInitiatedBackupFlag : true,
# delphixInitiatedBackup : [
# {
# userName : "XXXX",
# postgresSourcePort : XXXX,
# userPass : "XXXX",
# sourceHostAddress : "HOSTNAME"
# }
# ],
# singleDatabaseIngestionFlag : false,
# singleDatabaseIngestion : [],
# stagingPushFlag : false,
# postgresPort : XXXX,
# configSettingsStg : [],
# mountLocation : "/tmp/delphix_mnt"
# })
# sync_parameters = jsonencode({
# resync = true
# })
# }

resource "delphix_appdata_dsource" "test_app_data_dsource_second" {
source_id = "1-APPDATA_STAGED_SOURCE_CONFIG-7"
group_id = "1-GROUP-1"
log_sync_enabled = false
make_current_account_owner = true
link_type = "AppDataStaged"
name = "appdata_dsource_second"
staging_mount_base = ""
environment_user = "HOST_USER-2"
staging_environment = "1-UNIX_HOST_ENVIRONMENT-2"
parameters = jsonencode({
delphixInitiatedBackupFlag : true,
delphixInitiatedBackup : [
{
userName : "XXXX",
postgresSourcePort : XXXX,
userPass : "XXXX",
sourceHostAddress : "HOSTNAME"
}
],
postgresPort : XXX,
mountLocation : "/tmp/delphix_mnt_second"
})
sync_parameters = jsonencode({
resync = true
})
ops_pre_sync {
name = "key-1"
command = "echo \"hello world\""
shell = "shell"
credentials_env_vars {
base_var_name = "XXXX"
password = "XXXX"
}
}
ops_post_sync {
name = "key-2"
command = "echo \"hello world\""
shell = "shell"
credentials_env_vars {
base_var_name = "XXXX"
password = "XXXX"
}
}
}


# Below are the 3 ways to link a dSource with parameters; use any one of them.
# externalBackup : [
#   {
#     keepStagingInSync : false,
#     backupPath : "/var/tmp/backup",
#     walLogPath : "/var/tmp/backup"
#   }
# ]

# singleDatabaseIngestion : [
#   {
#     databaseUserName : "postgres",
#     sourcePort : 5432,
#     dumpJobs : 2,
#     restoreJobs : 2,
#     databaseName : "abcd",
#     databaseUserPassword : "xxxx",
#     dumpDir : "abcd",
#     sourceHost : "abcd",
#     postgresqlFile : "abcd"
#   }
# ]

# delphixInitiatedBackup : [
#   {
#     userName : "XXXX",
#     postgresSourcePort : XXXX,
#     userPass : "XXXX",
#     sourceHostAddress : "HOSTNAME"
#   }
# ]
4 changes: 2 additions & 2 deletions go.mod
@@ -1,9 +1,9 @@
module terraform-provider-delphix

go 1.17
go 1.21.1

require (
github.com/delphix/dct-sdk-go v1.6.0
github.com/delphix/dct-sdk-go/v10 v10.0.0
github.com/hashicorp/terraform-plugin-sdk v1.17.2
)

