Skip to content

Commit 49b6aaa

Browse files
Merge pull request #99 from delphix-integrations/develop
HUBS-2054 | Terraform 3.3.0 | Dev to Main
2 parents 6204f77 + 02e6894 commit 49b6aaa

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

41 files changed

+953
-915
lines changed

.github/workflows/codeql.yml

Lines changed: 46 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,10 @@
22
# Copyright (c) 2023, 2024 by Delphix. All rights reserved.
33
#
44
#
5+
#
6+
# Copyright (c) 2023, 2024 by Delphix. All rights reserved.
7+
#
8+
#
59
# For most projects, this workflow file will not need changing; you simply need
610
# to commit it to your repository.
711
#
@@ -17,71 +21,80 @@ name: "CodeQL"
1721

1822
on:
1923
push:
20-
branches: [ main, develop ]
24+
branches: [ "main", "**/*", "dependabot/**/*", "dlpx/**/*", "gh-readonly-queue/**/*", "projects/**/*" ]
2125
pull_request:
22-
# The branches below must be a subset of the branches above
23-
branches: [ "develop" ]
26+
branches: [ "main", "**/*", "dependabot/**/*", "dlpx/**/*", "gh-readonly-queue/**/*", "projects/**/*" ]
2427
schedule:
25-
- cron: '40 13 * * 5'
28+
- cron: '30 11 * * 1'
2629

2730
jobs:
2831
analyze:
29-
name: Analyze
32+
name: Analyze (${{ matrix.language }})
3033
# Runner size impacts CodeQL analysis time. To learn more, please see:
3134
# - https://gh.io/recommended-hardware-resources-for-running-codeql
3235
# - https://gh.io/supported-runners-and-hardware-resources
33-
# - https://gh.io/using-larger-runners
34-
# Consider using larger runners for possible analysis time improvements.
36+
# - https://gh.io/using-larger-runners (GitHub.com only)
37+
# Consider using larger runners or machines with greater resources for possible analysis time improvements.
3538
runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
3639
timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
3740
permissions:
41+
# required for all workflows
42+
security-events: write
43+
44+
# required to fetch internal or private CodeQL packs
45+
packages: read
46+
47+
# only required for workflows in private repositories
3848
actions: read
3949
contents: read
40-
security-events: write
4150

4251
strategy:
4352
fail-fast: false
4453
matrix:
45-
language: [ 'go' ]
46-
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby', 'swift' ]
47-
# Use only 'java' to analyze code written in Java, Kotlin or both
48-
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
49-
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
50-
54+
include:
55+
- language: go
56+
build-mode: autobuild
57+
# CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift'
58+
# Use `c-cpp` to analyze code written in C, C++ or both
59+
# Use 'java-kotlin' to analyze code written in Java, Kotlin or both
60+
# Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
61+
# To learn more about changing the languages that are analyzed or customizing the build mode for your analysis,
62+
# see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning.
63+
# If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how
64+
# your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages
5165
steps:
5266
- name: Checkout repository
53-
uses: actions/checkout@v3
67+
uses: actions/checkout@v4
5468

5569
# Initializes the CodeQL tools for scanning.
5670
- name: Initialize CodeQL
57-
uses: github/codeql-action/init@v2
71+
uses: github/codeql-action/init@v3
5872
with:
5973
languages: ${{ matrix.language }}
60-
queries: security-extended,security-and-quality
74+
build-mode: ${{ matrix.build-mode }}
6175
# If you wish to specify custom queries, you can do so here or in a config file.
6276
# By default, queries listed here will override any specified in a config file.
6377
# Prefix the list here with "+" to use these queries and those in the config file.
6478

6579
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
66-
# queries: security-extended,security-and-quality
67-
80+
queries: security-extended,security-and-quality
6881

69-
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
70-
# If this step fails, then you should remove it and run the build manually (see below)
71-
- if: matrix.language == 'go'
72-
name: Autobuild
73-
uses: github/codeql-action/autobuild@v2
82+
# If the analyze step fails for one of the languages you are analyzing with
83+
# "We were unable to automatically build your code", modify the matrix above
84+
# to set the build mode to "manual" for that language. Then modify this step
85+
# to build your code.
7486
# ℹ️ Command-line programs to run using the OS shell.
7587
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
76-
77-
# If the Autobuild fails above, remove it and uncomment the following three lines.
78-
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
79-
80-
# - run: |
81-
# echo "Run, Build Application using script"
82-
# ./location_of_script_within_repo/buildscript.sh
83-
88+
- if: matrix.build-mode == 'manual'
89+
shell: bash
90+
run: |
91+
echo 'If you are using a "manual" build mode for one or more of the' \
92+
'languages you are analyzing, replace this with the commands to build' \
93+
'your code, for example:'
94+
echo ' make bootstrap'
95+
echo ' make release'
96+
exit 1
8497
- name: Perform CodeQL Analysis
85-
uses: github/codeql-action/analyze@v2
98+
uses: github/codeql-action/analyze@v3
8699
with:
87100
category: "/language:${{matrix.language}}"

.goreleaser.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
# Visit https://goreleaser.com for documentation on how to customize this
22
# behavior.
33
env:
4-
- PROVIDER_VERSION=3.2.3
4+
- PROVIDER_VERSION=3.3.0
55
before:
66
hooks:
77
# this is just an example and not a requirement for provider building/publishing

GNUmakefile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,8 @@ HOSTNAME=delphix.com
33
NAMESPACE=dct
44
NAME=delphix
55
BINARY=terraform-provider-${NAME}
6-
VERSION=3.2.3
7-
OS_ARCH=darwin_amd64
6+
VERSION=3.3.0
7+
OS_ARCH=darwin_arm64
88

99
default: install
1010

docs/resources/appdata_dsource.md

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -125,27 +125,27 @@ resource "delphix_appdata_dsource" "dsource_name" {
125125

126126
* `make_current_account_owner` - (Required) Whether the account creating this reporting schedule must be configured as owner of the reporting schedule.
127127

128-
* `description` - (Optional) The notes/description for the dSource.
128+
* `description` - The notes/description for the dSource.
129129

130130
* `link_type` - (Required) The type of link to create. Default is AppDataDirect.
131131
* `AppDataDirect` - Represents the AppData specific parameters of a link request for a source directly replicated into the Delphix Engine.
132132
* `AppDataStaged` - Represents the AppData specific parameters of a link request for a source with a staging source.
133133

134-
* `name` - (Optional) The unique name of the dSource. If unset, a name is randomly generated.
134+
* `name` - The unique name of the dSource. If unset, a name is randomly generated.
135135

136-
* `staging_mount_base` - (Optional) The base mount point for the NFS mount on the staging environment [AppDataStaged only].
136+
* `staging_mount_base` - The base mount point for the NFS mount on the staging environment [AppDataStaged only].
137137

138138
* `environment_user` - (Required) The OS user to use for linking.
139139

140140
* `staging_environment` - (Required) The environment used as an intermediate stage to pull data into Delphix [AppDataStaged only].
141141

142-
* `staging_environment_user` - (Optional) The environment user used to access the staging environment [AppDataStaged only].
142+
* `staging_environment_user` - The environment user used to access the staging environment [AppDataStaged only].
143143

144-
* `tags` - (Optional) The tags to be created for dSource. This is a map of 2 parameters:
144+
* `tags` - The tags to be created for dSource. This is a map of 2 parameters:
145145
* `key` - (Required) Key of the tag
146146
* `value` - (Required) Value of the tag
147147

148-
* `ops_pre_sync` - (Optional) Operations to perform before syncing the created dSource. These operations can quiesce any data prior to syncing
148+
* `ops_pre_sync` - Operations to perform before syncing the created dSource. These operations can quiesce any data prior to syncing
149149
* `name` - Name of the hook
150150
* `command` - Command to be executed
151151
* `shell` - Type of shell. Valid values are `[bash, shell, expect, ps, psd]`
@@ -162,7 +162,7 @@ resource "delphix_appdata_dsource" "dsource_name" {
162162
* `azure_vault_secret_key` - Azure vault key in the key-value store.
163163
* `cyberark_vault_query_string` - Query to find a credential in the CyberArk vault.
164164

165-
* `ops_post_sync` - (Optional) Operations to perform after syncing a created dSource.
165+
* `ops_post_sync` - Operations to perform after syncing a created dSource.
166166
* `name` - Name of the hook
167167
* `command` - Command to be executed
168168
* `shell` - Type of shell. Valid values are `[bash, shell, expect, ps, psd]`
@@ -179,14 +179,14 @@ resource "delphix_appdata_dsource" "dsource_name" {
179179
* `azure_vault_secret_key` - Azure vault key in the key-value store.
180180
* `cyberark_vault_query_string` - Query to find a credential in the CyberArk vault.
181181

182-
* `excludes` - (Optional) List of subdirectories in the source to exclude when syncing data.These paths are relative to the root of the source directory. [AppDataDirect only]
182+
* `excludes` - List of subdirectories in the source to exclude when syncing data.These paths are relative to the root of the source directory. [AppDataDirect only]
183183

184-
* `follow_symlinks` - (Optional) List of symlinks in the source to follow when syncing data.These paths are relative to the root of the source directory. All other symlinks are preserved. [AppDataDirect only]
184+
* `follow_symlinks` - List of symlinks in the source to follow when syncing data.These paths are relative to the root of the source directory. All other symlinks are preserved. [AppDataDirect only]
185185

186-
* `parameters` - (Optional) The JSON payload is based on the type of dSource being created. Different data sources require different parameters.
186+
* `parameters` - The JSON payload is based on the type of dSource being created. Different data sources require different parameters.
187187

188-
* `sync_parameters` - (Optional) The JSON payload conforming to the snapshot parameters definition in a LUA toolkit or platform plugin.
188+
* `sync_parameters` - The JSON payload conforming to the snapshot parameters definition in a LUA toolkit or platform plugin.
189189

190-
* `skip_wait_for_snapshot_creation` - (Optional) By default this resource will wait for a snapshot to be created post-dSource creation. This ensures a snapshot is available during the VDB provisioning. This behavior can be skipped by setting this parameter to `true`.
190+
* `skip_wait_for_snapshot_creation` - By default this resource will wait for a snapshot to be created post-dSource creation. This ensures a snapshot is available during the VDB provisioning. This behavior can be skipped by setting this parameter to `true`.
191191

192-
* `wait_time` - (Optional) By default this resource waits 0 minutes for a snapshot to be created. Increase the integer value as needed for larger dSource snapshots. This parameter can be ignored if 'skip_wait_for_snapshot_creation' is set to `true`.
192+
* `wait_time` - By default this resource waits 0 minutes for a snapshot to be created. Increase the integer value as needed for larger dSource snapshots. This parameter can be ignored if 'skip_wait_for_snapshot_creation' is set to `true`.

docs/resources/database_postgresql.md

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -27,9 +27,9 @@ resource "delphix_database_postgresql" "source" {
2727

2828
* `repository_value` - (Required) The Id or Name of the Repository onto which the source will be created.
2929

30-
* `environment_value` - (Optional) The Id or Name of the environment to create the source on.
30+
* `environment_value` - The Id or Name of the environment to create the source on.
3131

32-
* `engine_value` - (Optional) The Id or Name of the engine to create the source on.
32+
* `engine_value` - The Id or Name of the engine to create the source on.
3333

3434
* `id` - The Source object entity ID.
3535

@@ -66,3 +66,18 @@ resource "delphix_database_postgresql" "source" {
6666
* `tags` - The tags to be created for database. This is a map of 2 parameters:
6767
* `key` - Key of the tag
6868
* `value` - Value of the tag
69+
70+
## Import (Beta)
71+
72+
Use the [`import` block](https://developer.hashicorp.com/terraform/language/import) to add source configs created directly in Data Control Tower into a Terraform state file.
73+
74+
For example:
75+
```terraform
76+
import {
77+
to = delphix_database_postgresql.source_config_import
78+
id = "source_config_id"
79+
}
80+
```
81+
82+
*This is a beta feature. Delphix offers no guarantees of support or compatibility.*
83+

docs/resources/environment.md

Lines changed: 36 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -165,42 +165,42 @@ resource "delphix_environment" "fc-tgt-cluster" {
165165
* `engine_id` - (Required) The DCT ID of the Engine on which to create the environment. This ID can be obtained by querying the DCT engines API. A Delphix Engine must be registered with DCT first for it to create an Engine ID.
166166
* `os_name` - (Required) Operating system type of the environment. Valid values are `[UNIX, WINDOWS]`
167167
* `hostname` - (Required) Host Name or IP Address of the host that being added to Delphix.
168-
* `name` - (Optional) The name of the environment.
169-
* `is_cluster` - (Optional) Whether the environment to be created is a cluster.
170-
* `cluster_home` - (Optional) Absolute path to cluster home directory. This parameter is (Required) for UNIX cluster environments.
171-
* `staging_environment` - (Optional) Id of the environment where Delphix Connector is installed. This is a (Required) parameter when creating Windows source environments.
172-
* `connector_port` - (Optional) Specify port on which Delphix connector will run. This is a (Required) parameter when creating Windows target environments.
173-
* `is_target` - (Optional) Whether the environment to be created is a target cluster environment. This property is used only when creating Windows cluster environments.
174-
* `ssh_port` - (Optional) ssh port of the environment.
175-
* `toolkit_path` - (Optional) The path where Delphix toolkit can be pushed.
176-
* `username` - (Optional) OS username for Delphix.
177-
* `password` - (Optional) OS user's password.
178-
* `vault` - (Optional) The name or reference of the vault from which to read the host credentials.
179-
* `hashicorp_vault_engine` - (Optional) Vault engine name where the credential is stored.
180-
* `hashicorp_vault_secret_path` - (Optional) Path in the vault engine where the credential is stored.
181-
* `hashicorp_vault_username_key` - (Optional) Key for the username in the key-value store.
182-
* `hashicorp_vault_secret_key` - (Optional) Key for the password in the key-value store.
183-
* `cyberark_vault_query_string` - (Optional) Query to find a credential in the CyberArk vault.
184-
* `use_kerberos_authentication` - (Optional) Whether to use kerberos authentication.
185-
* `use_engine_public_key` - (Optional) Whether to use public key authentication.
186-
* `nfs_addresses` - (Optional) Array of ip address or hostnames. Valid values are a list of addresses. For eg: `["192.168.10.2"]`
187-
* `ase_db_username` - (Optional) Username for the SAP ASE database.
188-
* `ase_db_password` - (Optional) Password for the SAP ASE database.
189-
* `ase_db_vault` - (Optional) The name or reference of the vault from which to read the ASE database credentials.
190-
* `ase_db_hashicorp_vault_engine` - (Optional) Vault engine name where the credential is stored.
191-
* `ase_db_hashicorp_vault_secret_path` - (Optional) Path in the vault engine where the credential is stored.
192-
* `ase_db_hashicorp_vault_username_key` - (Optional) Key for the username in the key-value store.
193-
* `ase_db_hashicorp_vault_secret_key` - (Optional) Key for the password in the key-value store.
194-
* `ase_db_cyberark_vault_query_string` - (Optional) Query to find a credential in the CyberArk vault.
195-
* `ase_db_use_kerberos_authentication` - (Optional) Whether to use kerberos authentication for ASE DB discovery.
196-
* `java_home` - (Optional) The path to the user managed Java Development Kit (JDK). If not specified, then the OpenJDK will be used.
197-
* `dsp_keystore_path` - (Optional) DSP keystore path.
198-
* `dsp_keystore_password` - (Optional) DSP keystore password.
199-
* `dsp_keystore_alias` - (Optional) DSP keystore alias.
200-
* `dsp_truststore_path` - (Optional) DSP truststore path.
201-
* `dsp_truststore_password` - (Optional) DSP truststore password.
202-
* `description` - (Optional) The environment description.
203-
* `tags` - (Optional) The tags to be created for this environment. This is a map of 2 parameters:
168+
* `name` - The name of the environment.
169+
* `is_cluster` - Whether the environment to be created is a cluster.
170+
* `cluster_home` - Absolute path to cluster home directory. This parameter is (Required) for UNIX cluster environments.
171+
* `staging_environment` - Id of the environment where Delphix Connector is installed. This is a (Required) parameter when creating Windows source environments.
172+
* `connector_port` - Specify port on which Delphix connector will run. This is a (Required) parameter when creating Windows target environments.
173+
* `is_target` - Whether the environment to be created is a target cluster environment. This property is used only when creating Windows cluster environments.
174+
* `ssh_port` - ssh port of the environment.
175+
* `toolkit_path` - The path where Delphix toolkit can be pushed.
176+
* `username` - OS username for Delphix.
177+
* `password` - OS user's password.
178+
* `vault` - The name or reference of the vault from which to read the host credentials.
179+
* `hashicorp_vault_engine` - Vault engine name where the credential is stored.
180+
* `hashicorp_vault_secret_path` - Path in the vault engine where the credential is stored.
181+
* `hashicorp_vault_username_key` - Key for the username in the key-value store.
182+
* `hashicorp_vault_secret_key` - Key for the password in the key-value store.
183+
* `cyberark_vault_query_string` - Query to find a credential in the CyberArk vault.
184+
* `use_kerberos_authentication` - Whether to use kerberos authentication.
185+
* `use_engine_public_key` - Whether to use public key authentication.
186+
* `nfs_addresses` - Array of ip address or hostnames. Valid values are a list of addresses. For eg: `["192.168.10.2"]`
187+
* `ase_db_username` - Username for the SAP ASE database.
188+
* `ase_db_password` - Password for the SAP ASE database.
189+
* `ase_db_vault` - The name or reference of the vault from which to read the ASE database credentials.
190+
* `ase_db_hashicorp_vault_engine` - Vault engine name where the credential is stored.
191+
* `ase_db_hashicorp_vault_secret_path` - Path in the vault engine where the credential is stored.
192+
* `ase_db_hashicorp_vault_username_key` - Key for the username in the key-value store.
193+
* `ase_db_hashicorp_vault_secret_key` - Key for the password in the key-value store.
194+
* `ase_db_cyberark_vault_query_string` - Query to find a credential in the CyberArk vault.
195+
* `ase_db_use_kerberos_authentication` - Whether to use kerberos authentication for ASE DB discovery.
196+
* `java_home` - The path to the user managed Java Development Kit (JDK). If not specified, then the OpenJDK will be used.
197+
* `dsp_keystore_path` - DSP keystore path.
198+
* `dsp_keystore_password` - DSP keystore password.
199+
* `dsp_keystore_alias` - DSP keystore alias.
200+
* `dsp_truststore_path` - DSP truststore path.
201+
* `dsp_truststore_password` - DSP truststore password.
202+
* `description` - The environment description.
203+
* `tags` - The tags to be created for this environment. This is a map of 2 parameters:
204204
* `key` - (Required) Key of the tag
205205
* `value` - (Required) Value of the tag
206206

0 commit comments

Comments
 (0)